Revision 6e147ecc
b/docs/source/clientlib.rst | ||
---|---|---|
1 | 1 |
Client Library |
2 | 2 |
============== |
3 | 3 |
|
4 |
.. automodule:: pithos.lib.client |
|
4 |
.. automodule:: pithos.tools.lib.client
|
|
5 | 5 |
:show-inheritance: |
6 | 6 |
:members: |
7 | 7 |
:undoc-members: |
b/fabfile.py | ||
---|---|---|
42 | 42 |
env.project_root = "./" |
43 | 43 |
env.develop = False |
44 | 44 |
env.autoremove = True |
45 |
env.packages = ['snf-pithos-lib', 'snf-pithos-backend', 'snf-pithos-app', |
|
46 |
'snf-pithos-tools'] |
|
45 |
env.packages = ['snf-pithos-backend', 'snf-pithos-app', 'snf-pithos-tools'] |
|
47 | 46 |
env.capture = False |
48 | 47 |
env.colors = True |
49 | 48 |
env.pypi_root = 'pypi' |
... | ... | |
171 | 170 |
# |
172 | 171 |
|
173 | 172 |
env.debian_branch = 'debian-0.9' |
174 |
env.deb_packages = ['snf-pithos-lib', 'snf-pithos-backend', |
|
175 |
'snf-pithos-tools', 'snf-pithos-app'] |
|
173 |
env.deb_packages = ['snf-pithos-backend', 'snf-pithos-tools', 'snf-pithos-app'] |
|
176 | 174 |
env.signdebs = False |
177 | 175 |
env.debrelease = False # Increase release number in Debian changelogs |
178 | 176 |
env.upstream = 'packaging' |
b/other/migrate-data | ||
---|---|---|
41 | 41 |
from pithos import settings |
42 | 42 |
from pithos.backends.modular import ModularBackend |
43 | 43 |
|
44 |
from pithos.lib.hashmap import HashMap |
|
44 |
from pithos.tools.lib.hashmap import HashMap
|
|
45 | 45 |
|
46 | 46 |
from migrate import Migration, Cache |
47 | 47 |
|
b/other/migrate-db | ||
---|---|---|
46 | 46 |
from pithos.backends.modular import CLUSTER_NORMAL, CLUSTER_HISTORY, CLUSTER_DELETED |
47 | 47 |
from pithos.backends.lib.sqlalchemy.node import Node, ROOTNODE |
48 | 48 |
|
49 |
from pithos.lib.transfer import upload |
|
50 |
from pithos.lib.hashmap import HashMap |
|
51 |
from pithos.lib.client import Fault |
|
49 |
from pithos.tools.lib.transfer import upload
|
|
50 |
from pithos.tools.lib.hashmap import HashMap
|
|
51 |
from pithos.tools.lib.client import Fault
|
|
52 | 52 |
|
53 | 53 |
from migrate import Migration, Cache |
54 | 54 |
|
b/other/stats-calculator.sql | ||
---|---|---|
1 |
# Top level |
|
2 |
create temporary table tmp_stats as select 0 as "level", 0 as "node", 0 as "parent", count(serial) as "population", sum(size) as "bytes", max(mtime) as "mtime", cluster, false as "final" from versions group by cluster; |
|
3 |
|
|
4 |
# Account level |
|
5 |
insert into tmp_stats select 1 as "level", n.node, n.parent, count(v.serial) as "population", sum(v.size) as "bytes", max(v.mtime) as "mtime", cluster, false as "final" from versions v, nodes n where n.node=v.node and n.parent=0 and n.node!=0 group by node, cluster; |
|
6 |
create temporary table tmp_nodes select distinct node, level from tmp_stats where level=1; |
|
7 |
|
|
8 |
# Container level |
|
9 |
insert into tmp_stats select 2 as "level", n.node, n.parent, count(v.serial) as "population", sum(v.size) as "bytes", max(v.mtime) as "mtime", cluster, false as "final" from versions v, nodes n where n.node=v.node and n.parent in (select node from tmp_nodes where level=1) group by node, cluster; |
|
10 |
insert into tmp_nodes select distinct node, level from tmp_stats where level=2; |
|
11 |
|
|
12 |
# Object level |
|
13 |
insert into tmp_stats select 3 as "level", n.node, n.parent, count(v.serial) as "population", sum(v.size) as "bytes", max(v.mtime) as "mtime", cluster, false as "final" from versions v, nodes n where n.node=v.node and n.parent in (select node from tmp_nodes where level=2) group by node, cluster; |
|
14 |
insert into tmp_nodes select distinct node, level from tmp_stats where level=3; |
|
15 |
|
|
16 |
# Update containers |
|
17 |
create table tmp_sums as select parent as "node", sum(population) as "population", sum(bytes) as "bytes", max(mtime) as "mtime", cluster from tmp_stats where level=3 group by parent, cluster; |
|
18 |
insert into tmp_stats select 2 as "level", n.node, n.parent, t.population, t.bytes, t.mtime, t.cluster, true as "final" from tmp_sums t, nodes n where n.node=t.node; |
|
19 |
drop table tmp_sums; |
|
20 |
|
|
21 |
# Update accounts |
|
22 |
create table tmp_sums as select parent as "node", sum(bytes) as "bytes", max(mtime) as "mtime", cluster from tmp_stats where level=2 group by parent, cluster; |
|
23 |
create table tmp_population as select parent as "node", sum(population) as "population", cluster from tmp_stats where level=2 and final=false group by parent, cluster; |
|
24 |
insert into tmp_stats select 1 as "level", t.node, 0 as "parent", IFNULL(p.population, 0) as "population", t.bytes, t.mtime, t.cluster, true as "final" from tmp_sums t left join tmp_population p on p.node=t.node and p.cluster=t.cluster; |
|
25 |
drop table tmp_sums; |
|
26 |
drop table tmp_population; |
|
27 |
|
|
28 |
# Update top level |
|
29 |
create table tmp_sums as select parent as "node", sum(bytes) as "bytes", max(mtime) as "mtime", cluster from tmp_stats where level=1 group by parent, cluster; |
|
30 |
create table tmp_population as select parent as "node", sum(population) as "population", cluster from tmp_stats where level=1 and final=false group by parent, cluster; |
|
31 |
insert into tmp_stats select 0 as "level", t.node, 0 as "parent", IFNULL(p.population, 0) as "population", t.bytes, t.mtime, t.cluster, true as "final" from tmp_sums t left join tmp_population p on p.node=t.node and p.cluster=t.cluster; |
|
32 |
drop table tmp_sums; |
|
33 |
drop table tmp_population; |
|
34 |
|
|
35 |
# Clean up |
|
36 |
drop table tmp_nodes; |
|
37 |
delete from tmp_stats where final=false; |
b/snf-pithos-app/pithos/api/functions.py | ||
---|---|---|
41 | 41 |
from django.utils.encoding import smart_str |
42 | 42 |
from django.views.decorators.csrf import csrf_exempt |
43 | 43 |
|
44 |
from pithos.lib.user import get_user |
|
45 |
from pithos.lib.filter import parse_filters |
|
44 |
from synnefo.lib.astakos import get_user |
|
46 | 45 |
|
47 | 46 |
from pithos.api.faults import (Fault, NotModified, BadRequest, Unauthorized, Forbidden, ItemNotFound, Conflict, |
48 | 47 |
LengthRequired, PreconditionFailed, RequestEntityTooLarge, RangeNotSatisfiable, UnprocessableEntity) |
... | ... | |
52 | 51 |
validate_modification_preconditions, validate_matching_preconditions, split_container_object_string, |
53 | 52 |
copy_or_move_object, get_int_parameter, get_content_length, get_content_range, socket_read_iterator, |
54 | 53 |
SaveToBackendHandler, object_data_response, put_object_block, hashmap_md5, simple_list_response, api_method) |
55 |
from pithos.backends.base import NotAllowedError, QuotaError |
|
56 | 54 |
from pithos.api.settings import AUTHENTICATION_URL, AUTHENTICATION_USERS |
57 | 55 |
|
56 |
from pithos.backends.base import NotAllowedError, QuotaError |
|
57 |
from pithos.backends.filter import parse_filters |
|
58 |
|
|
58 | 59 |
import logging |
59 | 60 |
import hashlib |
60 | 61 |
|
b/snf-pithos-app/pithos/api/public.py | ||
---|---|---|
36 | 36 |
from django.http import HttpResponse |
37 | 37 |
from django.views.decorators.csrf import csrf_exempt |
38 | 38 |
|
39 |
from pithos.lib.user import get_user
|
|
39 |
from synnefo.lib.astakos import get_user
|
|
40 | 40 |
|
41 | 41 |
from pithos.api.faults import (Fault, BadRequest, ItemNotFound) |
42 | 42 |
from pithos.api.util import (put_object_headers, update_manifest_meta, |
b/snf-pithos-app/pithos/api/util.py | ||
---|---|---|
48 | 48 |
from django.core.files.uploadhandler import FileUploadHandler |
49 | 49 |
from django.core.files.uploadedfile import UploadedFile |
50 | 50 |
|
51 |
from pithos.lib.compat import parse_http_date_safe, parse_http_date
|
|
51 |
from synnefo.lib.parsedate import parse_http_date_safe, parse_http_date
|
|
52 | 52 |
|
53 | 53 |
from pithos.api.faults import (Fault, NotModified, BadRequest, Unauthorized, Forbidden, ItemNotFound, |
54 | 54 |
Conflict, LengthRequired, PreconditionFailed, RequestEntityTooLarge, |
/dev/null | ||
---|---|---|
1 |
from log import LoggingConfigMiddleware |
|
2 |
from secure import SecureMiddleware |
/dev/null | ||
---|---|---|
1 |
# Copyright 2011-2012 GRNET S.A. All rights reserved. |
|
2 |
# |
|
3 |
# Redistribution and use in source and binary forms, with or |
|
4 |
# without modification, are permitted provided that the following |
|
5 |
# conditions are met: |
|
6 |
# |
|
7 |
# 1. Redistributions of source code must retain the above |
|
8 |
# copyright notice, this list of conditions and the following |
|
9 |
# disclaimer. |
|
10 |
# |
|
11 |
# 2. Redistributions in binary form must reproduce the above |
|
12 |
# copyright notice, this list of conditions and the following |
|
13 |
# disclaimer in the documentation and/or other materials |
|
14 |
# provided with the distribution. |
|
15 |
# |
|
16 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS |
|
17 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
18 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
|
19 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR |
|
20 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
|
21 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
|
22 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF |
|
23 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED |
|
24 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT |
|
25 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN |
|
26 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
27 |
# POSSIBILITY OF SUCH DAMAGE. |
|
28 |
# |
|
29 |
# The views and conclusions contained in the software and |
|
30 |
# documentation are those of the authors and should not be |
|
31 |
# interpreted as representing official policies, either expressed |
|
32 |
# or implied, of GRNET S.A. |
|
33 |
|
|
34 |
from django.conf import settings |
|
35 |
from django.core.exceptions import MiddlewareNotUsed |
|
36 |
|
|
37 |
from pithos.lib.dictconfig import dictConfig |
|
38 |
|
|
39 |
import logging |
|
40 |
|
|
41 |
|
|
42 |
class NullHandler(logging.Handler): |
|
43 |
def emit(self, record): |
|
44 |
pass |
|
45 |
|
|
46 |
|
|
47 |
class LoggingConfigMiddleware: |
|
48 |
def __init__(self): |
|
49 |
'''Initialise the logging setup from settings, called on first request.''' |
|
50 |
logging_setting = getattr(settings, 'LOGGING_SETUP', None) |
|
51 |
if logging_setting: |
|
52 |
# Disable handlers that are not used by any logger. |
|
53 |
active_handlers = set() |
|
54 |
loggers = logging_setting.get('loggers', {}) |
|
55 |
for logger in loggers.values(): |
|
56 |
active_handlers.update(logger.get('handlers', [])) |
|
57 |
handlers = logging_setting.get('handlers', {}) |
|
58 |
for handler in handlers: |
|
59 |
if handler not in active_handlers: |
|
60 |
handlers[handler] = {'class': 'logging.NullHandler'} |
|
61 |
|
|
62 |
logging.NullHandler = NullHandler |
|
63 |
dictConfig(logging_setting) |
|
64 |
raise MiddlewareNotUsed('Logging setup only.') |
/dev/null | ||
---|---|---|
1 |
# Copyright 2011-2012 GRNET S.A. All rights reserved. |
|
2 |
# |
|
3 |
# Redistribution and use in source and binary forms, with or |
|
4 |
# without modification, are permitted provided that the following |
|
5 |
# conditions are met: |
|
6 |
# |
|
7 |
# 1. Redistributions of source code must retain the above |
|
8 |
# copyright notice, this list of conditions and the following |
|
9 |
# disclaimer. |
|
10 |
# |
|
11 |
# 2. Redistributions in binary form must reproduce the above |
|
12 |
# copyright notice, this list of conditions and the following |
|
13 |
# disclaimer in the documentation and/or other materials |
|
14 |
# provided with the distribution. |
|
15 |
# |
|
16 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS |
|
17 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
18 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
|
19 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR |
|
20 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
|
21 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
|
22 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF |
|
23 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED |
|
24 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT |
|
25 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN |
|
26 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
27 |
# POSSIBILITY OF SUCH DAMAGE. |
|
28 |
# |
|
29 |
# The views and conclusions contained in the software and |
|
30 |
# documentation are those of the authors and should not be |
|
31 |
# interpreted as representing official policies, either expressed |
|
32 |
# or implied, of GRNET S.A. |
|
33 |
|
|
34 |
class SecureMiddleware(object): |
|
35 |
def process_request(self, request): |
|
36 |
if 'HTTP_X_FORWARDED_PROTOCOL' in request.META: |
|
37 |
request.is_secure = lambda: request.META['HTTP_X_FORWARDED_PROTOCOL'] == 'https' |
b/snf-pithos-backend/pithos/backends/filter.py | ||
---|---|---|
1 |
# Copyright 2011-2012 GRNET S.A. All rights reserved. |
|
2 |
# |
|
3 |
# Redistribution and use in source and binary forms, with or |
|
4 |
# without modification, are permitted provided that the following |
|
5 |
# conditions are met: |
|
6 |
# |
|
7 |
# 1. Redistributions of source code must retain the above |
|
8 |
# copyright notice, this list of conditions and the following |
|
9 |
# disclaimer. |
|
10 |
# |
|
11 |
# 2. Redistributions in binary form must reproduce the above |
|
12 |
# copyright notice, this list of conditions and the following |
|
13 |
# disclaimer in the documentation and/or other materials |
|
14 |
# provided with the distribution. |
|
15 |
# |
|
16 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS |
|
17 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
18 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
|
19 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR |
|
20 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
|
21 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
|
22 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF |
|
23 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED |
|
24 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT |
|
25 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN |
|
26 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
27 |
# POSSIBILITY OF SUCH DAMAGE. |
|
28 |
# |
|
29 |
# The views and conclusions contained in the software and |
|
30 |
# documentation are those of the authors and should not be |
|
31 |
# interpreted as representing official policies, either expressed |
|
32 |
# or implied, of GRNET S.A. |
|
33 |
|
|
34 |
import re |
|
35 |
|
|
36 |
|
|
37 |
_regexfilter = re.compile('(!?)\s*(\S+?)\s*(?:(=|!=|<=|>=|<|>)\s*(\S*?)\s*)?$', re.UNICODE) |
|
38 |
|
|
39 |
|
|
40 |
def parse_filters(terms): |
|
41 |
included = [] |
|
42 |
excluded = [] |
|
43 |
opers = [] |
|
44 |
match = _regexfilter.match |
|
45 |
for term in terms: |
|
46 |
m = match(term) |
|
47 |
if m is None: |
|
48 |
continue |
|
49 |
neg, key, op, value = m.groups() |
|
50 |
if neg: |
|
51 |
excluded.append(key) |
|
52 |
elif op: |
|
53 |
opers.append((key, op, value)) |
|
54 |
elif not value: |
|
55 |
included.append(key) |
|
56 |
|
|
57 |
return included, excluded, opers |
b/snf-pithos-backend/pithos/backends/lib/rabbitmq/queue.py | ||
---|---|---|
31 | 31 |
# interpreted as representing official policies, either expressed |
32 | 32 |
# or implied, of GRNET S.A. |
33 | 33 |
|
34 |
from pithos.lib.queue import exchange_connect, exchange_send, exchange_close, Receipt
|
|
34 |
from synnefo.lib.queue import exchange_connect, exchange_send, exchange_close, Receipt
|
|
35 | 35 |
|
36 | 36 |
|
37 | 37 |
class Queue(object): |
b/snf-pithos-backend/pithos/backends/lib/sqlalchemy/node.py | ||
---|---|---|
41 | 41 |
|
42 | 42 |
from dbworker import DBWorker |
43 | 43 |
|
44 |
from pithos.lib.filter import parse_filters
|
|
44 |
from pithos.backends.filter import parse_filters
|
|
45 | 45 |
|
46 | 46 |
|
47 | 47 |
ROOTNODE = 0 |
b/snf-pithos-backend/pithos/backends/lib/sqlite/node.py | ||
---|---|---|
35 | 35 |
|
36 | 36 |
from dbworker import DBWorker |
37 | 37 |
|
38 |
from pithos.lib.filter import parse_filters
|
|
38 |
from pithos.backends.filter import parse_filters
|
|
39 | 39 |
|
40 | 40 |
|
41 | 41 |
ROOTNODE = 0 |
b/snf-pithos-backend/pithos/backends/modular.py | ||
---|---|---|
36 | 36 |
import time |
37 | 37 |
import uuid as uuidlib |
38 | 38 |
import logging |
39 |
import hashlib |
|
39 | 40 |
import binascii |
40 | 41 |
|
41 | 42 |
from base import DEFAULT_QUOTA, DEFAULT_VERSIONING, NotAllowedError, QuotaError, BaseBackend |
42 | 43 |
|
43 |
from pithos.lib.hashmap import HashMap |
|
44 |
# Stripped-down version of the HashMap class found in tools. |
|
45 |
class HashMap(list): |
|
46 |
|
|
47 |
def __init__(self, blocksize, blockhash): |
|
48 |
super(HashMap, self).__init__() |
|
49 |
self.blocksize = blocksize |
|
50 |
self.blockhash = blockhash |
|
51 |
|
|
52 |
def _hash_raw(self, v): |
|
53 |
h = hashlib.new(self.blockhash) |
|
54 |
h.update(v) |
|
55 |
return h.digest() |
|
56 |
|
|
57 |
def hash(self): |
|
58 |
if len(self) == 0: |
|
59 |
return self._hash_raw('') |
|
60 |
if len(self) == 1: |
|
61 |
return self.__getitem__(0) |
|
62 |
|
|
63 |
h = list(self) |
|
64 |
s = 2 |
|
65 |
while s < len(h): |
|
66 |
s = s * 2 |
|
67 |
h += [('\x00' * len(h[0]))] * (s - len(h)) |
|
68 |
while len(h) > 1: |
|
69 |
h = [self._hash_raw(h[x] + h[x + 1]) for x in range(0, len(h), 2)] |
|
70 |
return h[0] |
|
44 | 71 |
|
45 | 72 |
# Default modules and settings. |
46 | 73 |
DEFAULT_DB_MODULE = 'pithos.backends.lib.sqlalchemy' |
b/snf-pithos-backend/setup.py | ||
---|---|---|
66 | 66 |
# Package requirements |
67 | 67 |
INSTALL_REQUIRES = [ |
68 | 68 |
'snf-common', |
69 |
'snf-pithos-lib', |
|
70 | 69 |
'SQLAlchemy>=0.6.3', |
71 | 70 |
] |
72 | 71 |
|
/dev/null | ||
---|---|---|
1 |
Copyright 2011 GRNET S.A. All rights reserved. |
|
2 |
|
|
3 |
Redistribution and use in source and binary forms, with or |
|
4 |
without modification, are permitted provided that the following |
|
5 |
conditions are met: |
|
6 |
|
|
7 |
1. Redistributions of source code must retain the above |
|
8 |
copyright notice, this list of conditions and the following |
|
9 |
disclaimer. |
|
10 |
|
|
11 |
2. Redistributions in binary form must reproduce the above |
|
12 |
copyright notice, this list of conditions and the following |
|
13 |
disclaimer in the documentation and/or other materials |
|
14 |
provided with the distribution. |
|
15 |
|
|
16 |
THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS |
|
17 |
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
18 |
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
|
19 |
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR |
|
20 |
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
|
21 |
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
|
22 |
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF |
|
23 |
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED |
|
24 |
AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT |
|
25 |
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN |
|
26 |
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
27 |
POSSIBILITY OF SUCH DAMAGE. |
|
28 |
|
|
29 |
The views and conclusions contained in the software and |
|
30 |
documentation are those of the authors and should not be |
|
31 |
interpreted as representing official policies, either expressed |
|
32 |
or implied, of GRNET S.A. |
/dev/null | ||
---|---|---|
1 |
|
/dev/null | ||
---|---|---|
1 |
recursive-include pithos *.json *.html *.json *.xml *.txt |
|
2 |
recursive-include pithos/ui/static * |
|
3 |
|
|
4 |
include README Changelog |
|
5 |
include distribute_setup.py |
/dev/null | ||
---|---|---|
1 |
|
/dev/null | ||
---|---|---|
1 |
#!python |
|
2 |
"""Bootstrap distribute installation |
|
3 |
|
|
4 |
If you want to use setuptools in your package's setup.py, just include this |
|
5 |
file in the same directory with it, and add this to the top of your setup.py:: |
|
6 |
|
|
7 |
from distribute_setup import use_setuptools |
|
8 |
use_setuptools() |
|
9 |
|
|
10 |
If you want to require a specific version of setuptools, set a download |
|
11 |
mirror, or use an alternate download directory, you can do so by supplying |
|
12 |
the appropriate options to ``use_setuptools()``. |
|
13 |
|
|
14 |
This file can also be run as a script to install or upgrade setuptools. |
|
15 |
""" |
|
16 |
import os |
|
17 |
import sys |
|
18 |
import time |
|
19 |
import fnmatch |
|
20 |
import tempfile |
|
21 |
import tarfile |
|
22 |
from distutils import log |
|
23 |
|
|
24 |
try: |
|
25 |
from site import USER_SITE |
|
26 |
except ImportError: |
|
27 |
USER_SITE = None |
|
28 |
|
|
29 |
try: |
|
30 |
import subprocess |
|
31 |
|
|
32 |
def _python_cmd(*args): |
|
33 |
args = (sys.executable,) + args |
|
34 |
return subprocess.call(args) == 0 |
|
35 |
|
|
36 |
except ImportError: |
|
37 |
# will be used for python 2.3 |
|
38 |
def _python_cmd(*args): |
|
39 |
args = (sys.executable,) + args |
|
40 |
# quoting arguments if windows |
|
41 |
if sys.platform == 'win32': |
|
42 |
def quote(arg): |
|
43 |
if ' ' in arg: |
|
44 |
return '"%s"' % arg |
|
45 |
return arg |
|
46 |
args = [quote(arg) for arg in args] |
|
47 |
return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 |
|
48 |
|
|
49 |
DEFAULT_VERSION = "0.6.10" |
|
50 |
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/" |
|
51 |
SETUPTOOLS_FAKED_VERSION = "0.6c11" |
|
52 |
|
|
53 |
SETUPTOOLS_PKG_INFO = """\ |
|
54 |
Metadata-Version: 1.0 |
|
55 |
Name: setuptools |
|
56 |
Version: %s |
|
57 |
Summary: xxxx |
|
58 |
Home-page: xxx |
|
59 |
Author: xxx |
|
60 |
Author-email: xxx |
|
61 |
License: xxx |
|
62 |
Description: xxx |
|
63 |
""" % SETUPTOOLS_FAKED_VERSION |
|
64 |
|
|
65 |
|
|
66 |
def _install(tarball): |
|
67 |
# extracting the tarball |
|
68 |
tmpdir = tempfile.mkdtemp() |
|
69 |
log.warn('Extracting in %s', tmpdir) |
|
70 |
old_wd = os.getcwd() |
|
71 |
try: |
|
72 |
os.chdir(tmpdir) |
|
73 |
tar = tarfile.open(tarball) |
|
74 |
_extractall(tar) |
|
75 |
tar.close() |
|
76 |
|
|
77 |
# going in the directory |
|
78 |
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) |
|
79 |
os.chdir(subdir) |
|
80 |
log.warn('Now working in %s', subdir) |
|
81 |
|
|
82 |
# installing |
|
83 |
log.warn('Installing Distribute') |
|
84 |
if not _python_cmd('setup.py', 'install'): |
|
85 |
log.warn('Something went wrong during the installation.') |
|
86 |
log.warn('See the error message above.') |
|
87 |
finally: |
|
88 |
os.chdir(old_wd) |
|
89 |
|
|
90 |
|
|
91 |
def _build_egg(egg, tarball, to_dir): |
|
92 |
# extracting the tarball |
|
93 |
tmpdir = tempfile.mkdtemp() |
|
94 |
log.warn('Extracting in %s', tmpdir) |
|
95 |
old_wd = os.getcwd() |
|
96 |
try: |
|
97 |
os.chdir(tmpdir) |
|
98 |
tar = tarfile.open(tarball) |
|
99 |
_extractall(tar) |
|
100 |
tar.close() |
|
101 |
|
|
102 |
# going in the directory |
|
103 |
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) |
|
104 |
os.chdir(subdir) |
|
105 |
log.warn('Now working in %s', subdir) |
|
106 |
|
|
107 |
# building an egg |
|
108 |
log.warn('Building a Distribute egg in %s', to_dir) |
|
109 |
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) |
|
110 |
|
|
111 |
finally: |
|
112 |
os.chdir(old_wd) |
|
113 |
# returning the result |
|
114 |
log.warn(egg) |
|
115 |
if not os.path.exists(egg): |
|
116 |
raise IOError('Could not build the egg.') |
|
117 |
|
|
118 |
|
|
119 |
def _do_download(version, download_base, to_dir, download_delay): |
|
120 |
egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' |
|
121 |
% (version, sys.version_info[0], sys.version_info[1])) |
|
122 |
if not os.path.exists(egg): |
|
123 |
tarball = download_setuptools(version, download_base, |
|
124 |
to_dir, download_delay) |
|
125 |
_build_egg(egg, tarball, to_dir) |
|
126 |
sys.path.insert(0, egg) |
|
127 |
import setuptools |
|
128 |
setuptools.bootstrap_install_from = egg |
|
129 |
|
|
130 |
|
|
131 |
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, |
|
132 |
to_dir=os.curdir, download_delay=15, no_fake=True): |
|
133 |
# making sure we use the absolute path |
|
134 |
to_dir = os.path.abspath(to_dir) |
|
135 |
was_imported = 'pkg_resources' in sys.modules or \ |
|
136 |
'setuptools' in sys.modules |
|
137 |
try: |
|
138 |
try: |
|
139 |
import pkg_resources |
|
140 |
if not hasattr(pkg_resources, '_distribute'): |
|
141 |
if not no_fake: |
|
142 |
_fake_setuptools() |
|
143 |
raise ImportError |
|
144 |
except ImportError: |
|
145 |
return _do_download(version, download_base, to_dir, download_delay) |
|
146 |
try: |
|
147 |
pkg_resources.require("distribute>="+version) |
|
148 |
return |
|
149 |
except pkg_resources.VersionConflict: |
|
150 |
e = sys.exc_info()[1] |
|
151 |
if was_imported: |
|
152 |
sys.stderr.write( |
|
153 |
"The required version of distribute (>=%s) is not available,\n" |
|
154 |
"and can't be installed while this script is running. Please\n" |
|
155 |
"install a more recent version first, using\n" |
|
156 |
"'easy_install -U distribute'." |
|
157 |
"\n\n(Currently using %r)\n" % (version, e.args[0])) |
|
158 |
sys.exit(2) |
|
159 |
else: |
|
160 |
del pkg_resources, sys.modules['pkg_resources'] # reload ok |
|
161 |
return _do_download(version, download_base, to_dir, |
|
162 |
download_delay) |
|
163 |
except pkg_resources.DistributionNotFound: |
|
164 |
return _do_download(version, download_base, to_dir, |
|
165 |
download_delay) |
|
166 |
finally: |
|
167 |
if not no_fake: |
|
168 |
_create_fake_setuptools_pkg_info(to_dir) |
|
169 |
|
|
170 |
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, |
|
171 |
to_dir=os.curdir, delay=15): |
|
172 |
"""Download distribute from a specified location and return its filename |
|
173 |
|
|
174 |
`version` should be a valid distribute version number that is available |
|
175 |
as an egg for download under the `download_base` URL (which should end |
|
176 |
with a '/'). `to_dir` is the directory where the egg will be downloaded. |
|
177 |
`delay` is the number of seconds to pause before an actual download |
|
178 |
attempt. |
|
179 |
""" |
|
180 |
# making sure we use the absolute path |
|
181 |
to_dir = os.path.abspath(to_dir) |
|
182 |
try: |
|
183 |
from urllib.request import urlopen |
|
184 |
except ImportError: |
|
185 |
from urllib2 import urlopen |
|
186 |
tgz_name = "distribute-%s.tar.gz" % version |
|
187 |
url = download_base + tgz_name |
|
188 |
saveto = os.path.join(to_dir, tgz_name) |
|
189 |
src = dst = None |
|
190 |
if not os.path.exists(saveto): # Avoid repeated downloads |
|
191 |
try: |
|
192 |
log.warn("Downloading %s", url) |
|
193 |
src = urlopen(url) |
|
194 |
# Read/write all in one block, so we don't create a corrupt file |
|
195 |
# if the download is interrupted. |
|
196 |
data = src.read() |
|
197 |
dst = open(saveto, "wb") |
|
198 |
dst.write(data) |
|
199 |
finally: |
|
200 |
if src: |
|
201 |
src.close() |
|
202 |
if dst: |
|
203 |
dst.close() |
|
204 |
return os.path.realpath(saveto) |
|
205 |
|
|
206 |
def _no_sandbox(function): |
|
207 |
def __no_sandbox(*args, **kw): |
|
208 |
try: |
|
209 |
from setuptools.sandbox import DirectorySandbox |
|
210 |
if not hasattr(DirectorySandbox, '_old'): |
|
211 |
def violation(*args): |
|
212 |
pass |
|
213 |
DirectorySandbox._old = DirectorySandbox._violation |
|
214 |
DirectorySandbox._violation = violation |
|
215 |
patched = True |
|
216 |
else: |
|
217 |
patched = False |
|
218 |
except ImportError: |
|
219 |
patched = False |
|
220 |
|
|
221 |
try: |
|
222 |
return function(*args, **kw) |
|
223 |
finally: |
|
224 |
if patched: |
|
225 |
DirectorySandbox._violation = DirectorySandbox._old |
|
226 |
del DirectorySandbox._old |
|
227 |
|
|
228 |
return __no_sandbox |
|
229 |
|
|
230 |
def _patch_file(path, content): |
|
231 |
"""Will backup the file then patch it""" |
|
232 |
existing_content = open(path).read() |
|
233 |
if existing_content == content: |
|
234 |
# already patched |
|
235 |
log.warn('Already patched.') |
|
236 |
return False |
|
237 |
log.warn('Patching...') |
|
238 |
_rename_path(path) |
|
239 |
f = open(path, 'w') |
|
240 |
try: |
|
241 |
f.write(content) |
|
242 |
finally: |
|
243 |
f.close() |
|
244 |
return True |
|
245 |
|
|
246 |
_patch_file = _no_sandbox(_patch_file) |
|
247 |
|
|
248 |
def _same_content(path, content): |
|
249 |
return open(path).read() == content |
|
250 |
|
|
251 |
def _rename_path(path): |
|
252 |
new_name = path + '.OLD.%s' % time.time() |
|
253 |
log.warn('Renaming %s into %s', path, new_name) |
|
254 |
os.rename(path, new_name) |
|
255 |
return new_name |
|
256 |
|
|
257 |
def _remove_flat_installation(placeholder): |
|
258 |
if not os.path.isdir(placeholder): |
|
259 |
log.warn('Unkown installation at %s', placeholder) |
|
260 |
return False |
|
261 |
found = False |
|
262 |
for file in os.listdir(placeholder): |
|
263 |
if fnmatch.fnmatch(file, 'setuptools*.egg-info'): |
|
264 |
found = True |
|
265 |
break |
|
266 |
if not found: |
|
267 |
log.warn('Could not locate setuptools*.egg-info') |
|
268 |
return |
|
269 |
|
|
270 |
log.warn('Removing elements out of the way...') |
|
271 |
pkg_info = os.path.join(placeholder, file) |
|
272 |
if os.path.isdir(pkg_info): |
|
273 |
patched = _patch_egg_dir(pkg_info) |
|
274 |
else: |
|
275 |
patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) |
|
276 |
|
|
277 |
if not patched: |
|
278 |
log.warn('%s already patched.', pkg_info) |
|
279 |
return False |
|
280 |
# now let's move the files out of the way |
|
281 |
for element in ('setuptools', 'pkg_resources.py', 'site.py'): |
|
282 |
element = os.path.join(placeholder, element) |
|
283 |
if os.path.exists(element): |
|
284 |
_rename_path(element) |
|
285 |
else: |
|
286 |
log.warn('Could not find the %s element of the ' |
|
287 |
'Setuptools distribution', element) |
|
288 |
return True |
|
289 |
|
|
290 |
_remove_flat_installation = _no_sandbox(_remove_flat_installation) |
|
291 |
|
|
292 |
def _after_install(dist): |
|
293 |
log.warn('After install bootstrap.') |
|
294 |
placeholder = dist.get_command_obj('install').install_purelib |
|
295 |
_create_fake_setuptools_pkg_info(placeholder) |
|
296 |
|
|
297 |
def _create_fake_setuptools_pkg_info(placeholder): |
|
298 |
if not placeholder or not os.path.exists(placeholder): |
|
299 |
log.warn('Could not find the install location') |
|
300 |
return |
|
301 |
pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) |
|
302 |
setuptools_file = 'setuptools-%s-py%s.egg-info' % \ |
|
303 |
(SETUPTOOLS_FAKED_VERSION, pyver) |
|
304 |
pkg_info = os.path.join(placeholder, setuptools_file) |
|
305 |
if os.path.exists(pkg_info): |
|
306 |
log.warn('%s already exists', pkg_info) |
|
307 |
return |
|
308 |
|
|
309 |
log.warn('Creating %s', pkg_info) |
|
310 |
f = open(pkg_info, 'w') |
|
311 |
try: |
|
312 |
f.write(SETUPTOOLS_PKG_INFO) |
|
313 |
finally: |
|
314 |
f.close() |
|
315 |
|
|
316 |
pth_file = os.path.join(placeholder, 'setuptools.pth') |
|
317 |
log.warn('Creating %s', pth_file) |
|
318 |
f = open(pth_file, 'w') |
|
319 |
try: |
|
320 |
f.write(os.path.join(os.curdir, setuptools_file)) |
|
321 |
finally: |
|
322 |
f.close() |
|
323 |
|
|
324 |
_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info) |
|
325 |
|
|
326 |
def _patch_egg_dir(path):
    """Replace a setuptools egg directory at *path* with a fake one.

    Renames the real egg out of the way and recreates *path* containing
    only an EGG-INFO/PKG-INFO with the fake metadata.

    Returns False if the egg was already patched, True after patching.
    """
    # let's check if it's already patched
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    if os.path.exists(pkg_info):
        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
            log.warn('%s already patched.', pkg_info)
            return False
    # move the real egg aside, then rebuild a minimal fake in its place
    _rename_path(path)
    os.mkdir(path)
    os.mkdir(os.path.join(path, 'EGG-INFO'))
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    f = open(pkg_info, 'w')
    try:
        f.write(SETUPTOOLS_PKG_INFO)
    finally:
        f.close()
    return True

# wrapped so the directory surgery works under setuptools' install sandbox
_patch_egg_dir = _no_sandbox(_patch_egg_dir)
|
345 |
|
|
346 |
def _before_install():
    """Pre-install hook: neutralize any existing setuptools installation."""
    log.warn('Before install bootstrap.')
    _fake_setuptools()
|
349 |
|
|
350 |
|
|
351 |
def _under_prefix(location): |
|
352 |
if 'install' not in sys.argv: |
|
353 |
return True |
|
354 |
args = sys.argv[sys.argv.index('install')+1:] |
|
355 |
for index, arg in enumerate(args): |
|
356 |
for option in ('--root', '--prefix'): |
|
357 |
if arg.startswith('%s=' % option): |
|
358 |
top_dir = arg.split('root=')[-1] |
|
359 |
return location.startswith(top_dir) |
|
360 |
elif arg == option: |
|
361 |
if len(args) > index: |
|
362 |
top_dir = args[index+1] |
|
363 |
return location.startswith(top_dir) |
|
364 |
if arg == '--user' and USER_SITE is not None: |
|
365 |
return location.startswith(USER_SITE) |
|
366 |
return True |
|
367 |
|
|
368 |
|
|
369 |
def _fake_setuptools():
    """Find an installed setuptools and replace it with fake metadata.

    Locates setuptools via pkg_resources; if it lies under the requested
    install prefix, either removes a flat (non-egg) installation or patches
    the egg directory in place, then relaunches the current process so the
    fake metadata takes effect.
    """
    log.warn('Scanning installed packages')
    try:
        import pkg_resources
    except ImportError:
        # we're cool: no pkg_resources means no setuptools to neutralize
        log.warn('Setuptools or Distribute does not seem to be installed.')
        return
    ws = pkg_resources.working_set
    try:
        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
                                  replacement=False))
    except TypeError:
        # old distribute API: Requirement.parse has no `replacement` kwarg
        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))

    if setuptools_dist is None:
        log.warn('No setuptools distribution found')
        return
    # detecting if it was already faked
    setuptools_location = setuptools_dist.location
    log.warn('Setuptools installation detected at %s', setuptools_location)

    # if --root or --prefix was provided, and if
    # setuptools is not located in them, we don't patch it
    if not _under_prefix(setuptools_location):
        log.warn('Not patching, --root or --prefix is installing Distribute'
                 ' in another location')
        return

    # let's see if its an egg
    if not setuptools_location.endswith('.egg'):
        log.warn('Non-egg installation')
        res = _remove_flat_installation(setuptools_location)
        if not res:
            return
    else:
        log.warn('Egg installation')
        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
        if (os.path.exists(pkg_info) and
            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
            log.warn('Already patched.')
            return
        log.warn('Patching...')
        # let's create a fake egg replacing setuptools one
        res = _patch_egg_dir(setuptools_location)
        if not res:
            return
    log.warn('Patched done.')
    _relaunch()
|
419 |
|
|
420 |
|
|
421 |
def _relaunch():
    """Re-execute the current process and exit with its return code.

    Needed after patching so the relaunched interpreter sees the fake
    setuptools metadata instead of the original one.
    """
    log.warn('Relaunching...')
    # we have to relaunch the process
    # pip marker to avoid a relaunch bug: pip invokes this script as
    # `python -c ... install --single-version-externally-managed`, so
    # argv[0] must be rewritten to something re-runnable.
    # NOTE(review): if the relaunched run patches again, this recurses;
    # termination relies on the "Already patched." early return upstream.
    if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
        sys.argv[0] = 'setup.py'
    args = [sys.executable] + sys.argv
    sys.exit(subprocess.call(args))
|
429 |
|
|
430 |
|
|
431 |
def _extractall(self, path=".", members=None):
    """Extract all members from the archive to the current working
    directory and set owner, modification time and permissions on
    directories afterwards. `path' specifies a different directory
    to extract to. `members' is optional and must be a subset of the
    list returned by getmembers().

    Backport of TarFile.extractall for Python versions that lack it;
    intended to be bound onto a TarFile instance/class (*self* is the
    archive).
    """
    import copy
    import operator
    from tarfile import ExtractError
    directories = []

    if members is None:
        members = self

    for tarinfo in members:
        if tarinfo.isdir():
            # Extract directories with a safe mode.
            # Remember the real tarinfo so the true mode/owner/mtime can be
            # restored after all files inside have been written.
            directories.append(tarinfo)
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 448 # decimal for oct 0700
        self.extract(tarinfo, path)

    # Reverse sort directories.
    # Deepest-first order so chmod/utime on a parent cannot disturb
    # children processed later.
    if sys.version_info < (2, 4):
        # list.sort gained the `key` keyword in 2.4; use a cmp function
        def sorter(dir1, dir2):
            return cmp(dir1.name, dir2.name)
        directories.sort(sorter)
        directories.reverse()
    else:
        directories.sort(key=operator.attrgetter('name'), reverse=True)

    # Set correct owner, mtime and filemode on directories.
    for tarinfo in directories:
        dirpath = os.path.join(path, tarinfo.name)
        try:
            self.chown(tarinfo, dirpath)
            self.utime(tarinfo, dirpath)
            self.chmod(tarinfo, dirpath)
        except ExtractError:
            # sys.exc_info spelling keeps 2.x/3.x-compatible exception access
            e = sys.exc_info()[1]
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
|
476 |
|
|
477 |
|
|
478 |
def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall.

    *argv* is accepted for CLI compatibility but unused. FIX: *version*
    was previously ignored — download_setuptools() was always called with
    its defaults — so a caller-requested version had no effect; it is now
    forwarded (download_setuptools takes the version as its first
    parameter — confirm against its definition earlier in this file).
    """
    tarball = download_setuptools(version)
    _install(tarball)
|
482 |
|
|
483 |
|
|
484 |
# Script entry point: bootstrap (download + install) when run directly.
if __name__ == '__main__':
    main(sys.argv[1:])
/dev/null | ||
---|---|---|
1 |
# Copyright 2011 GRNET S.A. All rights reserved. |
|
2 |
# |
|
3 |
# Redistribution and use in source and binary forms, with or |
|
4 |
# without modification, are permitted provided that the following |
|
5 |
# conditions are met: |
|
6 |
# |
|
7 |
# 1. Redistributions of source code must retain the above |
|
8 |
# copyright notice, this list of conditions and the following |
|
9 |
# disclaimer. |
|
10 |
# |
|
11 |
# 2. Redistributions in binary form must reproduce the above |
|
12 |
# copyright notice, this list of conditions and the following |
|
13 |
# disclaimer in the documentation and/or other materials |
|
14 |
# provided with the distribution. |
|
15 |
# |
|
16 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS |
|
17 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
18 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
|
19 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR |
|
20 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
|
21 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
|
22 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF |
|
23 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED |
|
24 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT |
|
25 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN |
|
26 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
27 |
# POSSIBILITY OF SUCH DAMAGE. |
|
28 |
# |
|
29 |
# The views and conclusions contained in the software and |
|
30 |
# documentation are those of the authors and should not be |
|
31 |
# interpreted as representing official policies, either expressed |
|
32 |
# or implied, of GRNET S.A. |
|
33 |
|
|
34 |
# this is a namespace package: declare it so sibling distributions can
# contribute subpackages under the same package name
try:
    import pkg_resources
    pkg_resources.declare_namespace(__name__)
except ImportError:
    # setuptools not available: fall back to the stdlib mechanism
    import pkgutil
    __path__ = pkgutil.extend_path(__path__, __name__)
/dev/null | ||
---|---|---|
1 |
# Copyright 2011-2012 GRNET S.A. All rights reserved. |
|
2 |
# |
|
3 |
# Redistribution and use in source and binary forms, with or |
|
4 |
# without modification, are permitted provided that the following |
|
5 |
# conditions are met: |
|
6 |
# |
|
7 |
# 1. Redistributions of source code must retain the above |
|
8 |
# copyright notice, this list of conditions and the following |
|
9 |
# disclaimer. |
|
10 |
# |
|
11 |
# 2. Redistributions in binary form must reproduce the above |
|
12 |
# copyright notice, this list of conditions and the following |
|
13 |
# disclaimer in the documentation and/or other materials |
|
14 |
# provided with the distribution. |
|
15 |
# |
|
16 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS |
|
17 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
18 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
|
19 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR |
|
20 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
|
21 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
|
22 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF |
|
23 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED |
|
24 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT |
|
25 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN |
|
26 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
27 |
# POSSIBILITY OF SUCH DAMAGE. |
|
28 |
# |
|
29 |
# The views and conclusions contained in the software and |
|
30 |
# documentation are those of the authors and should not be |
|
31 |
# interpreted as representing official policies, either expressed |
|
32 |
# or implied, of GRNET S.A. |
|
33 |
|
|
34 |
from httplib import HTTPConnection, HTTPSConnection, HTTP |
|
35 |
from sys import stdin |
|
36 |
from xml.dom import minidom |
|
37 |
from StringIO import StringIO |
|
38 |
from urllib import quote, unquote |
|
39 |
from urlparse import urlparse |
|
40 |
|
|
41 |
import json |
|
42 |
import types |
|
43 |
import socket |
|
44 |
import urllib |
|
45 |
import datetime |
|
46 |
|
|
47 |
# Default reason phrases for the HTTP status codes the server is known to
# return; used to fill in a Fault's message when no body is supplied.
ERROR_CODES = {304: 'Not Modified',
               400: 'Bad Request',
               401: 'Unauthorized',
               403: 'Forbidden',
               404: 'Not Found',
               409: 'Conflict',
               411: 'Length Required',
               412: 'Precondition Failed',
               413: 'Request Entity Too Large',
               416: 'Range Not Satisfiable',
               422: 'Unprocessable Entity',
               500: 'Internal Server Error',
               501: 'Not Implemented'}

class Fault(Exception):
    """Error raised for failed API requests.

    Carries the response body in ``data`` and the HTTP status in
    ``status``. When no body is given, the standard reason phrase for a
    known status is used as the message.
    """
    def __init__(self, data='', status=None):
        # idiom fix: membership test directly on the dict (was `.keys()`)
        if data == '' and status in ERROR_CODES:
            data = ERROR_CODES[status]
        Exception.__init__(self, data)
        self.data = data
        self.status = status
|
68 |
|
|
69 |
class Client(object):
    """Low-level HTTP client for the storage API.

    Issues authenticated requests (via the ``X-Auth-Token`` header) against
    the service at ``url`` and delegates response handling to the
    module-level ``_handle_response`` helper.
    """

    def __init__(self, url, token, account, verbose=False, debug=False):
        """`url` can also include a port, e.g '127.0.0.1:8000'."""

        self.url = url
        self.account = account
        # debug implies verbose
        self.verbose = verbose or debug
        self.debug = debug
        self.token = token

    def _req(self, method, path, body=None, headers={}, format='text', params={}):
        """Perform a single HTTP request and return the handled response.

        NOTE(review): `headers`/`params` are shared mutable defaults; they
        are not mutated here (only read), but callers should still pass
        fresh dicts.
        """
        p = urlparse(self.url)
        if p.scheme == 'http':
            conn = HTTPConnection(p.netloc)
        elif p.scheme == 'https':
            conn = HTTPSConnection(p.netloc)
        else:
            raise Exception('Unknown URL scheme')

        full_path = _prepare_path(p.path + path, format, params)

        kwargs = {}
        kwargs['headers'] = _prepare_headers(headers)
        kwargs['headers']['X-Auth-Token'] = self.token
        if body:
            kwargs['body'] = body
            kwargs['headers'].setdefault('content-type', 'application/octet-stream')
        # content-length is always set, zero when there is no body
        kwargs['headers'].setdefault('content-length', len(body) if body else 0)

        #print '#', method, full_path, kwargs
        #t1 = datetime.datetime.utcnow()
        conn.request(method, full_path, **kwargs)

        resp = conn.getresponse()
        #t2 = datetime.datetime.utcnow()
        #print 'response time:', str(t2-t1)
        return _handle_response(resp, self.verbose, self.debug)

    def _chunked_transfer(self, path, method='PUT', f=stdin, headers=None,
                          blocksize=1024, params={}):
        """performs a chunked request, streaming `f` in `blocksize` pieces"""
        p = urlparse(self.url)
        if p.scheme == 'http':
            conn = HTTPConnection(p.netloc)
        elif p.scheme == 'https':
            conn = HTTPSConnection(p.netloc)
        else:
            raise Exception('Unknown URL scheme')

        full_path = _prepare_path(p.path + path, params=params)

        conn.putrequest(method, full_path)
        conn.putheader('x-auth-token', self.token)
        conn.putheader('content-type', 'application/octet-stream')
        conn.putheader('transfer-encoding', 'chunked')
        for k,v in _prepare_headers(headers).items():
            conn.putheader(k, v)
        conn.endheaders()

        # write body
        data = ''
        while True:
            if f.closed:
                break
            block = f.read(blocksize)
            if block == '':
                break
            # HTTP chunked encoding: hex length, CRLF, payload, CRLF
            data = '%x\r\n%s\r\n' % (len(block), block)
            try:
                conn.send(data)
            except:
                #retry (single best-effort resend of the same chunk)
                conn.send(data)
        # zero-length chunk terminates the stream
        data = '0\r\n\r\n'
        try:
            conn.send(data)
        except:
            #retry
            conn.send(data)

        resp = conn.getresponse()
        return _handle_response(resp, self.verbose, self.debug)

    def delete(self, path, format='text', params={}):
        """Issue a DELETE request."""
        return self._req('DELETE', path, format=format, params=params)

    def get(self, path, format='text', headers={}, params={}):
        """Issue a GET request."""
        return self._req('GET', path, headers=headers, format=format,
                        params=params)

    def head(self, path, format='text', params={}):
        """Issue a HEAD request."""
        return self._req('HEAD', path, format=format, params=params)

    def post(self, path, body=None, format='text', headers=None, params={}):
        """Issue a POST request."""
        return self._req('POST', path, body, headers=headers, format=format,
                        params=params)

    def put(self, path, body=None, format='text', headers=None, params={}):
        """Issue a PUT request."""
        return self._req('PUT', path, body, headers=headers, format=format,
                        params=params)

    def _list(self, path, format='text', params={}, **headers):
        """GET a listing and decode it according to `format`:
        json -> parsed object, xml -> minidom Document,
        text -> list of lines (trailing empty line dropped).
        """
        status, headers, data = self.get(path, format=format, headers=headers,
                                         params=params)
        if format == 'json':
            data = json.loads(data) if data else ''
        elif format == 'xml':
            data = minidom.parseString(data)
        else:
            data = data.split('\n')[:-1] if data else ''
        return data

    def _get_metadata(self, path, prefix=None, params={}):
        """HEAD `path` and return its headers as a dict.

        When `prefix` is given, only headers starting with it are kept and
        the prefix is stripped from the returned keys.
        """
        status, headers, data = self.head(path, params=params)
        prefixlen = len(prefix) if prefix else 0
        meta = {}
        for key, val in headers.items():
            if prefix and not key.startswith(prefix):
                continue
            elif prefix and key.startswith(prefix):
                key = key[prefixlen:]
            meta[key] = val
        return meta

    def _filter(self, l, d):
        """
        filter out from l elements having the metadata values provided

        NOTE(review): `ll = l` aliases rather than copies, so the input list
        is mutated in place while being iterated — removal of consecutive
        matches may be skipped; confirm whether this is relied upon.
        """
        ll = l
        for elem in l:
            if type(elem) == types.DictionaryType:
                for key in d.keys():
                    k = 'x_object_meta_%s' % key
                    if k in elem.keys() and elem[k] == d[key]:
                        ll.remove(elem)
                        break
        return ll
|
206 |
|
|
207 |
class OOS_Client(Client): |
|
208 |
"""Openstack Object Storage Client""" |
|
209 |
|
|
210 |
    def _update_metadata(self, path, entity, **meta):
        """adds new and updates the values of previously set metadata

        NOTE(review): existing metadata is always read from the *account*
        (retrieve_account_metadata) even when `entity` is 'container' —
        looks wrong for non-account entities; confirm intended behavior.
        """
        ex_meta = self.retrieve_account_metadata(restricted=True)
        ex_meta.update(meta)
        headers = {}
        prefix = 'x-%s-meta-' % entity
        for k,v in ex_meta.items():
            k = '%s%s' % (prefix, k)
            headers[k] = v
        return self.post(path, headers=headers)
|
220 |
|
|
221 |
def _reset_metadata(self, path, entity, **meta): |
|
222 |
""" |
|
223 |
overwrites all user defined metadata |
|
224 |
""" |
|
225 |
headers = {} |
|
226 |
prefix = 'x-%s-meta-' % entity |
|
227 |
for k,v in meta.items(): |
|
228 |
k = '%s%s' % (prefix, k) |
|
229 |
headers[k] = v |
|
230 |
return self.post(path, headers=headers) |
|
231 |
|
|
232 |
    def _delete_metadata(self, path, entity, meta=[]):
        """delete previously set metadata

        NOTE(review): this re-posts the *existing* values of the keys listed
        in `meta` (read from account metadata regardless of `entity`) rather
        than omitting them — verify this matches the server's delete
        semantics.
        """
        ex_meta = self.retrieve_account_metadata(restricted=True)
        headers = {}
        prefix = 'x-%s-meta-' % entity
        for k in ex_meta.keys():
            if k in meta:
                headers['%s%s' % (prefix, k)] = ex_meta[k]
        return self.post(path, headers=headers)
|
241 |
|
|
242 |
# Storage Account Services |
|
243 |
|
|
244 |
def list_containers(self, format='text', limit=None, |
|
245 |
marker=None, params={}, account=None, **headers): |
|
246 |
"""lists containers""" |
|
247 |
account = account or self.account |
|
248 |
path = '/%s' % account |
|
249 |
params.update({'limit':limit, 'marker':marker}) |
|
250 |
return self._list(path, format, params, **headers) |
|
251 |
|
|
252 |
def retrieve_account_metadata(self, restricted=False, account=None, **params): |
|
253 |
"""returns the account metadata""" |
|
254 |
account = account or self.account |
|
255 |
path = '/%s' % account |
|
256 |
prefix = 'x-account-meta-' if restricted else None |
|
257 |
return self._get_metadata(path, prefix, params) |
|
258 |
|
|
259 |
def update_account_metadata(self, account=None, **meta): |
|
260 |
"""updates the account metadata""" |
|
261 |
account = account or self.account |
|
262 |
path = '/%s' % account |
|
263 |
return self._update_metadata(path, 'account', **meta) |
|
264 |
|
|
265 |
def delete_account_metadata(self, meta=[], account=None): |
|
266 |
"""deletes the account metadata""" |
|
267 |
account = account or self.account |
|
268 |
path = '/%s' % account |
|
269 |
return self._delete_metadata(path, 'account', meta) |
|
270 |
|
|
271 |
def reset_account_metadata(self, account=None, **meta): |
|
272 |
"""resets account metadata""" |
|
273 |
account = account or self.account |
|
274 |
path = '/%s' % account |
|
275 |
return self._reset_metadata(path, 'account', **meta) |
|
276 |
|
|
277 |
# Storage Container Services |
|
278 |
|
|
279 |
    def _filter_trashed(self, l):
        # drop listing entries whose x_object_meta_trash equals 'true'
        return self._filter(l, {'trash':'true'})
|
281 |
|
|
282 |
def list_objects(self, container, format='text', |
|
283 |
limit=None, marker=None, prefix=None, delimiter=None, |
|
284 |
path=None, include_trashed=False, params={}, account=None, |
|
285 |
**headers): |
|
286 |
"""returns a list with the container objects""" |
|
287 |
account = account or self.account |
|
288 |
params.update({'limit':limit, 'marker':marker, 'prefix':prefix, |
|
289 |
'delimiter':delimiter, 'path':path}) |
|
290 |
l = self._list('/%s/%s' % (account, container), format, params, |
|
291 |
**headers) |
|
292 |
#TODO support filter trashed with xml also |
|
293 |
if format != 'xml' and not include_trashed: |
|
294 |
l = self._filter_trashed(l) |
|
295 |
return l |
|
296 |
|
|
297 |
    def create_container(self, container, account=None, meta={}, **headers):
        """creates a container

        Returns True when created (201), False when it already existed
        (202); any other status raises Fault.

        NOTE(review): meta keys are upper-cased here, unlike the object
        metadata helpers which keep the key's case — confirm intentional.
        """
        account = account or self.account
        if not headers:
            headers = {}
        for k,v in meta.items():
            headers['x-container-meta-%s' %k.strip().upper()] = v.strip()
        status, header, data = self.put('/%s/%s' % (account, container),
                                        headers=headers)
        if status == 202:
            return False
        elif status != 201:
            raise Fault(data, int(status))
        return True
|
311 |
|
|
312 |
def delete_container(self, container, params={}, account=None): |
|
313 |
"""deletes a container""" |
|
314 |
account = account or self.account |
|
315 |
return self.delete('/%s/%s' % (account, container), params=params) |
|
316 |
|
|
317 |
def retrieve_container_metadata(self, container, restricted=False, |
|
318 |
account=None, **params): |
|
319 |
"""returns the container metadata""" |
|
320 |
account = account or self.account |
|
321 |
prefix = 'x-container-meta-' if restricted else None |
|
322 |
return self._get_metadata('/%s/%s' % (account, container), prefix, |
|
323 |
params) |
|
324 |
|
|
325 |
    def update_container_metadata(self, container, account=None, **meta):
        """updates the container metadata"""
        account = account or self.account
        return self._update_metadata('/%s/%s' % (account, container),
                                     'container', **meta)
|
330 |
|
|
331 |
def delete_container_metadata(self, container, meta=[], account=None): |
|
332 |
"""deletes the container metadata""" |
|
333 |
account = account or self.account |
|
334 |
path = '/%s/%s' % (account, container) |
|
335 |
return self._delete_metadata(path, 'container', meta) |
|
336 |
|
|
337 |
# Storage Object Services |
|
338 |
|
|
339 |
def request_object(self, container, object, format='text', params={}, |
|
340 |
account=None, **headers): |
|
341 |
"""returns tuple containing the status, headers and data response for an object request""" |
|
342 |
account = account or self.account |
|
343 |
path = '/%s/%s/%s' % (account, container, object) |
|
344 |
status, headers, data = self.get(path, format, headers, params) |
|
345 |
return status, headers, data |
|
346 |
|
|
347 |
    def retrieve_object(self, container, object, format='text', params={},
                        account=None, **headers):
        """returns an object's data

        json responses are decoded, xml ones parsed with minidom; any other
        format returns the raw body.
        """
        account = account or self.account
        t = self.request_object(container, object, format, params, account,
                                **headers)
        # t is (status, headers, data); only the body is returned
        data = t[2]
        if format == 'json':
            data = json.loads(data) if data else ''
        elif format == 'xml':
            data = minidom.parseString(data)
        return data
|
359 |
|
|
360 |
def retrieve_object_hashmap(self, container, object, format='json', params={}, |
|
361 |
account=None, **headers): |
|
362 |
"""returns the hashmap representing object's data""" |
|
363 |
if not params: |
|
364 |
params = {} |
|
365 |
params.update({'hashmap':None}) |
|
366 |
return self.retrieve_object(container, object, params, format, account, **headers) |
|
367 |
|
|
368 |
    def create_directory_marker(self, container, object, account=None):
        """creates a directory marker: a zero-length object with
        content type 'application/directory'"""
        account = account or self.account
        if not object:
            raise Fault('Directory markers have to be nested in a container')
        h = {'content_type':'application/directory'}
        return self.create_zero_length_object(container, object, account=account,
                                              **h)
|
376 |
|
|
377 |
def create_object(self, container, object, f=stdin, format='text', meta={}, |
|
378 |
params={}, etag=None, content_type=None, content_encoding=None, |
|
379 |
content_disposition=None, account=None, **headers): |
|
380 |
"""creates a zero-length object""" |
|
381 |
account = account or self.account |
|
382 |
path = '/%s/%s/%s' % (account, container, object) |
|
383 |
for k, v in headers.items(): |
|
384 |
if v == None: |
|
385 |
headers.pop(k) |
|
386 |
|
|
387 |
l = ['etag', 'content_encoding', 'content_disposition', 'content_type'] |
|
388 |
l = [elem for elem in l if eval(elem)] |
|
389 |
for elem in l: |
|
390 |
headers.update({elem:eval(elem)}) |
|
391 |
headers.setdefault('content-type', 'application/octet-stream') |
|
392 |
|
|
393 |
for k,v in meta.items(): |
|
394 |
headers['x-object-meta-%s' %k.strip()] = v.strip() |
|
395 |
data = f.read() if f else None |
|
396 |
return self.put(path, data, format, headers=headers, params=params) |
|
397 |
|
|
398 |
    def create_zero_length_object(self, container, object, meta={}, etag=None,
                                  content_type=None, content_encoding=None,
                                  content_disposition=None, account=None,
                                  **headers):
        # Delegates to create_object with f=None (no body). locals() is
        # snapshotted before any other local is bound, so `args` holds
        # exactly this method's named parameters; self/container/headers/
        # account are removed because they are passed explicitly (or
        # merged back in via args.update(headers)).
        account = account or self.account
        args = locals().copy()
        for elem in ['self', 'container', 'headers', 'account']:
            args.pop(elem)
        args.update(headers)
        return self.create_object(container, account=account, f=None, **args)
|
408 |
|
|
409 |
    def update_object(self, container, object, f=stdin,
                      offset=None, meta={}, params={}, content_length=None,
                      content_type=None, content_encoding=None,
                      content_disposition=None, account=None, **headers):
        """Update an object's data via POST, reading the new content from `f`.

        `offset` selects where the write starts (Content-Range
        'bytes <offset>-/*'); without it the server appends ('bytes */*').
        """
        account = account or self.account
        path = '/%s/%s/%s' % (account, container, object)
        # drop extra headers that were passed as None
        # NOTE(review): pops while iterating — safe on Python 2 where
        # items() returns a list, not on Python 3
        for k, v in headers.items():
            if v == None:
                headers.pop(k)

        # promote the optional transfer attributes that were actually given
        l = ['content_encoding', 'content_disposition', 'content_type',
             'content_length']
        l = [elem for elem in l if eval(elem)]
        for elem in l:
            headers.update({elem:eval(elem)})

        if 'content_range' not in headers.keys():
            if offset != None:
                headers['content_range'] = 'bytes %s-/*' % offset
            else:
                # no offset: server-side append semantics
                headers['content_range'] = 'bytes */*'

        for k,v in meta.items():
            headers['x-object-meta-%s' %k.strip()] = v.strip()
        data = f.read() if f else None
        return self.post(path, data, headers=headers, params=params)
|
435 |
|
|
436 |
def update_object_using_chunks(self, container, object, f=stdin, |
|
437 |
blocksize=1024, offset=None, meta={}, |
|
438 |
params={}, content_type=None, content_encoding=None, |
|
439 |
content_disposition=None, account=None, **headers): |
|
440 |
"""updates an object (incremental upload)""" |
|
441 |
account = account or self.account |
|
442 |
path = '/%s/%s/%s' % (account, container, object) |
|
443 |
headers = headers if not headers else {} |
|
444 |
l = ['content_type', 'content_encoding', 'content_disposition'] |
Also available in: Unified diff