Merge branch 'master' of https://code.grnet.gr/git/pithos
author Christos Stathis <chstath@ebs.gr>
Tue, 11 Oct 2011 15:12:08 +0000 (18:12 +0300)
committer Christos Stathis <chstath@ebs.gr>
Tue, 11 Oct 2011 15:12:08 +0000 (18:12 +0300)
README.upgrade [new file with mode: 0644]
docs/source/adminguide.rst
docs/source/devguide.rst
pithos/api/faults.py
pithos/api/functions.py
pithos/api/util.py
pithos/backends/lib/sqlalchemy/dbwrapper.py
pithos/backends/lib/sqlalchemy/node.py
pithos/backends/lib/sqlalchemy/policy.py
pithos/backends/lib/sqlalchemy/public.py
pithos/middleware/auth.py

diff --git a/README.upgrade b/README.upgrade
new file mode 100644
index 0000000..6277144
--- /dev/null
+++ b/README.upgrade
@@ -0,0 +1 @@
+Upgrade notes.
diff --git a/docs/source/adminguide.rst b/docs/source/adminguide.rst
index e7b4563..97a7284 100644
@@ -9,7 +9,7 @@ Assuming a clean debian squeeze (stable) installation, use the following steps t
 Install packages::
 
   apt-get install git python-django python-setuptools python-sphinx
-  apt-get install python-sqlalchemy python-psycopg2
+  apt-get install python-sqlalchemy python-mysqldb python-psycopg2
   apt-get install apache2 libapache2-mod-wsgi
 
 Get the source::
@@ -164,7 +164,7 @@ Edit ``/etc/mysql/my.cnf`` to allow network connections and restart the server.
 
 Create database and user::
 
-  CREATE DATABASE pithos;
+  CREATE DATABASE pithos CHARACTER SET utf8 COLLATE utf8_bin;
   GRANT ALL ON pithos.* TO pithos@localhost IDENTIFIED BY 'password';
   GRANT ALL ON pithos.* TO pithos@'%' IDENTIFIED BY 'password';
 
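A quick way to confirm these settings took effect, once SQLAlchemy and python-mysqldb are installed (a sketch with placeholder credentials, not part of the guide)::

  from sqlalchemy import create_engine

  # Placeholder URL; substitute the real user, password and host.
  engine = create_engine('mysql://pithos:password@localhost/pithos?charset=utf8')
  row = engine.execute(
      "SELECT default_character_set_name, default_collation_name "
      "FROM information_schema.schemata WHERE schema_name = 'pithos'").fetchone()
  print row  # expected: ('utf8', 'utf8_bin')
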
diff --git a/docs/source/devguide.rst b/docs/source/devguide.rst
index c69387f..c6ccebe 100644
@@ -221,7 +221,7 @@ Reply Header Name           Value
 X-Account-Container-Count   The total number of containers
 X-Account-Object-Count      The total number of objects (**TBD**)
 X-Account-Bytes-Used        The total number of bytes stored
-X-Account-Bytes-Remaining   The total number of bytes remaining (**TBD**)
+X-Account-Bytes-Remaining   The total number of bytes remaining
 X-Account-Last-Login        The last login (**TBD**)
 X-Account-Until-Timestamp   The last account modification date until the timestamp provided
 X-Account-Group-*           Optional user defined groups
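
With X-Account-Bytes-Remaining now served, a client can read the quota headers with a plain HEAD request; a minimal sketch, assuming a running server and using a placeholder account URL and token::

  import urllib2

  req = urllib2.Request('https://pithos.example.com/v1/user')  # placeholder account URL
  req.add_header('X-Auth-Token', '0000')                       # placeholder token
  req.get_method = lambda: 'HEAD'
  resp = urllib2.urlopen(req)
  print resp.headers.get('X-Account-Bytes-Used')
  print resp.headers.get('X-Account-Bytes-Remaining')
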
diff --git a/pithos/api/faults.py b/pithos/api/faults.py
index d5c3795..f9dd1d1 100644
@@ -66,6 +66,9 @@ class LengthRequired(Fault):
 class PreconditionFailed(Fault):
     code = 412
 
+class RequestEntityTooLarge(Fault):
+    code = 413
+
 class RangeNotSatisfiable(Fault):
     code = 416
 
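The new 413 fault is presumably intended for requests that would overflow the account quota introduced elsewhere in this commit; a hedged sketch of how a handler might raise it (the helper and the content-length check are illustrative, not part of this diff)::

  from pithos.api.faults import RequestEntityTooLarge

  def check_upload_size(request, content_length):
      # Illustrative only: refuse an upload that cannot possibly fit in the quota.
      if request.quota and content_length > request.quota:
          raise RequestEntityTooLarge('Upload exceeds the account quota')
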
diff --git a/pithos/api/functions.py b/pithos/api/functions.py
index 947014b..00e341f 100644
@@ -49,7 +49,7 @@ from pithos.api.util import (rename_meta_key, format_header_key, printable_heade
     update_manifest_meta, update_sharing_meta, update_public_meta, validate_modification_preconditions,
     validate_matching_preconditions, split_container_object_string, copy_or_move_object,
     get_int_parameter, get_content_length, get_content_range, socket_read_iterator,
-    object_data_response, put_object_block, hashmap_hash, api_method)
+    object_data_response, put_object_block, hashmap_hash, api_method, json_encode_decimal)
 from pithos.backends import connect_backend
 from pithos.backends.base import NotAllowedError
 
@@ -192,7 +192,7 @@ def account_meta(request, v_account):
     validate_modification_preconditions(request, meta)
     
     response = HttpResponse(status=204)
-    put_account_headers(response, meta, groups)
+    put_account_headers(response, request.quota, meta, groups)
     return response
 
 @api_method('POST')
@@ -240,7 +240,7 @@ def container_list(request, v_account):
     validate_modification_preconditions(request, meta)
     
     response = HttpResponse()
-    put_account_headers(response, meta, groups)
+    put_account_headers(response, request.quota, meta, groups)
     
     marker = request.GET.get('marker')
     limit = get_int_parameter(request.GET.get('limit'))
@@ -329,10 +329,14 @@ def container_create(request, v_account, v_container):
     #                       badRequest (400)
     
     meta, policy = get_container_headers(request)
+    try:
+        if policy and int(policy.get('quota', 0)) > request.quota:
+            policy['quota'] = request.quota
+    except:
+        raise BadRequest('Invalid quota header')
     
     try:
-        request.backend.put_container(request.user, v_account, v_container,
-                                        policy)
+        request.backend.put_container(request.user, v_account, v_container, policy)
         ret = 201
     except NotAllowedError:
         raise Unauthorized('Access denied')
@@ -376,6 +380,11 @@ def container_update(request, v_account, v_container):
         replace = False
     if policy:
         try:
+            if int(policy.get('quota', 0)) > request.quota:
+                policy['quota'] = request.quota
+        except:
+            raise BadRequest('Invalid quota header')
+        try:
             request.backend.update_container_policy(request.user, v_account,
                                                 v_container, policy, replace)
         except NotAllowedError:
@@ -542,7 +551,7 @@ def object_list(request, v_account, v_container):
     if request.serialization == 'xml':
         data = render_to_string('objects.xml', {'container': v_container, 'objects': object_meta})
     elif request.serialization  == 'json':
-        data = json.dumps(object_meta)
+        data = json.dumps(object_meta, default=json_encode_decimal)
     response.status_code = 200
     response.content = data
     return response
@@ -619,7 +628,7 @@ def object_read(request, v_account, v_container, v_object):
             d['object'] = v_object
             data = render_to_string('versions.xml', d)
         elif request.serialization  == 'json':
-            data = json.dumps(d)
+            data = json.dumps(d, default=json_encode_decimal)
         
         response = HttpResponse(data, status=200)
         response['Content-Length'] = len(data)
diff --git a/pithos/api/util.py b/pithos/api/util.py
index a00e2ca..50f6e27 100644
@@ -55,7 +55,7 @@ import logging
 import re
 import hashlib
 import uuid
-
+import decimal
 
 logger = logging.getLogger(__name__)
 
@@ -70,6 +70,11 @@ class UTC(tzinfo):
    def dst(self, dt):
        return timedelta(0)
 
+def json_encode_decimal(obj):
+    if isinstance(obj, decimal.Decimal):
+        return str(obj)
+    raise TypeError(repr(obj) + " is not JSON serializable")
+
 def isoformat(d):
    """Return an ISO8601 date string that includes a timezone."""
 
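With json_encode_decimal defined as above, the json.dumps calls in functions.py can serialize the Decimal mtime/size values the sqlalchemy backend now returns; a short usage sketch::

  import json
  import decimal

  meta = {'bytes': decimal.Decimal('1024'), 'mtime': decimal.Decimal('1318345928.103')}
  print json.dumps(meta, default=json_encode_decimal, sort_keys=True)
  # {"bytes": "1024", "mtime": "1318345928.103"}
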
@@ -114,11 +119,13 @@ def get_account_headers(request):
             groups[n].remove('')
     return meta, groups
 
-def put_account_headers(response, meta, groups):
+def put_account_headers(response, quota, meta, groups):
     if 'count' in meta:
         response['X-Account-Container-Count'] = meta['count']
     if 'bytes' in meta:
         response['X-Account-Bytes-Used'] = meta['bytes']
+        if quota:
+            response['X-Account-Bytes-Remaining'] = quota - meta['bytes']
     response['Last-Modified'] = http_date(int(meta['modified']))
     for k in [x for x in meta.keys() if x.startswith('X-Account-Meta-')]:
         response[smart_str(k, strings_only=True)] = smart_str(meta[k], strings_only=True)
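
With the extra quota argument, account HEAD/GET responses now carry both the used and the remaining bytes; an illustrative call with made-up numbers (run inside the project, e.g. from a Django shell)::

  from django.http import HttpResponse
  from pithos.api.util import put_account_headers

  response = HttpResponse(status=204)
  meta = {'count': 3, 'bytes': 1073741824, 'modified': 1318345928}
  put_account_headers(response, 10737418240, meta, {})  # 10 GB quota, 1 GB used
  print response['X-Account-Bytes-Remaining']           # 9663676416
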
@@ -285,7 +292,6 @@ def copy_or_move_object(request, src_account, src_container, src_name, dest_acco
     """Copy or move an object."""
     
     meta, permissions, public = get_object_headers(request)
-    print '---', meta, permissions, public
     src_version = request.META.get('HTTP_X_SOURCE_VERSION')
     try:
         if move:
diff --git a/pithos/backends/lib/sqlalchemy/dbwrapper.py b/pithos/backends/lib/sqlalchemy/dbwrapper.py
index 4091a88..7195660 100644
@@ -46,6 +46,9 @@ class DBWrapper(object):
                 def connect(self, dbapi_con, con_record):
                     db_cursor = dbapi_con.execute('pragma foreign_keys=ON')
             self.engine = create_engine(db, connect_args={'check_same_thread': False}, poolclass=NullPool, listeners=[ForeignKeysListener()])
+        elif db.startswith('mysql://'):
+            db = '%s?charset=utf8&use_unicode=0' %db
+            self.engine = create_engine(db, convert_unicode=True)
         else:
             self.engine = create_engine(db)
         #self.engine.echo = True
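
For a MySQL deployment the connection URL simply gains the charset parameters before the engine is built; a minimal sketch with placeholder credentials::

  from sqlalchemy import create_engine

  # Placeholder URL; the query parameters mirror the rewrite applied above
  # to mysql:// URLs (utf8 on the wire, byte strings from the driver).
  db = 'mysql://pithos:password@localhost/pithos?charset=utf8&use_unicode=0'
  engine = create_engine(db, convert_unicode=True)
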
diff --git a/pithos/backends/lib/sqlalchemy/node.py b/pithos/backends/lib/sqlalchemy/node.py
index 585c7c0..a19d1c1 100644
 # or implied, of GRNET S.A.
 
 from time import time
-from sqlalchemy import Table, Integer, BigInteger, Float, Column, String, MetaData, ForeignKey
+from sqlalchemy import Table, Integer, BigInteger, DECIMAL, Column, String, MetaData, ForeignKey
 from sqlalchemy.schema import Index, Sequence
 from sqlalchemy.sql import func, and_, or_, null, select, bindparam
 from sqlalchemy.ext.compiler import compiles
 
 from dbworker import DBWorker
 
-ROOTNODE  = 0
+ROOTNODE  = 1
 
 ( SERIAL, NODE, HASH, SIZE, SOURCE, MTIME, MUSER, CLUSTER ) = range(8)
 
@@ -117,7 +117,7 @@ class Node(DBWorker):
         columns.append(Column('path', String(2048), default='', nullable=False))
         self.nodes = Table('nodes', metadata, *columns)
         # place an index on path
-        Index('idx_nodes_path', self.nodes.c.path, unique=True)
+        Index('idx_nodes_path', self.nodes.c.path)
         
         #create statistics table
         columns=[]
@@ -128,7 +128,7 @@ class Node(DBWorker):
                               primary_key=True))
         columns.append(Column('population', Integer, nullable=False, default=0))
         columns.append(Column('size', BigInteger, nullable=False, default=0))
-        columns.append(Column('mtime', Float))
+        columns.append(Column('mtime', DECIMAL))
         columns.append(Column('cluster', Integer, nullable=False, default=0,
                               primary_key=True))
         self.statistics = Table('statistics', metadata, *columns)
@@ -143,7 +143,7 @@ class Node(DBWorker):
         columns.append(Column('hash', String(255)))
         columns.append(Column('size', BigInteger, nullable=False, default=0))
         columns.append(Column('source', Integer))
-        columns.append(Column('mtime', Float))
+        columns.append(Column('mtime', DECIMAL))
         columns.append(Column('muser', String(255), nullable=False, default=''))
         columns.append(Column('cluster', Integer, nullable=False, default=0))
         self.versions = Table('versions', metadata, *columns)
@@ -255,11 +255,12 @@ class Node(DBWorker):
         c1 = select([self.nodes.c.node],
             self.nodes.c.parent == parent)
         where_clause = and_(self.versions.c.node.in_(c1),
-                            self.versions.c.cluster == cluster,
-                            self.versions.c.mtime <= before)
+                            self.versions.c.cluster == cluster)
         s = select([func.count(self.versions.c.serial),
                     func.sum(self.versions.c.size)])
         s = s.where(where_clause)
+        if before != inf:
+            s = s.where(self.versions.c.mtime <= before)
         r = self.conn.execute(s)
         row = r.fetchone()
         r.close()
@@ -305,9 +306,10 @@ class Node(DBWorker):
         s = select([func.count(self.versions.c.serial),
                     func.sum(self.versions.c.size)])
         where_clause = and_(self.versions.c.node == node,
-                         self.versions.c.cluster == cluster,
-                         self.versions.c.mtime <= before)
+                         self.versions.c.cluster == cluster)
         s = s.where(where_clause)
+        if before != inf:
+            s = s.where(self.versions.c.mtime <= before)
         r = self.conn.execute(s)
         row = r.fetchone()
         nr, size = row[0], row[1]
@@ -450,11 +452,12 @@ class Node(DBWorker):
                     self.versions.c.mtime,
                     self.versions.c.muser,
                     self.versions.c.cluster])
+        filtered = select([func.max(self.versions.c.serial)],
+                            self.versions.c.node == node)
+        if before != inf:
+            filtered = filtered.where(self.versions.c.mtime < before)
         s = s.where(and_(self.versions.c.cluster != except_cluster,
-                         self.versions.c.serial == select(
-                            [func.max(self.versions.c.serial)],
-                            and_(self.versions.c.node == node,
-                            self.versions.c.mtime < before))))
+                         self.versions.c.serial == filtered))
         r = self.conn.execute(s)
         props = r.fetchone()
         r.close()
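
The rewrites in this file all follow the same pattern: build the MAX(serial) scalar subquery first and attach the mtime bound only when a finite 'before' was requested, so the common "latest version" lookup skips the time filter. A self-contained sketch of the pattern, trimmed to the relevant columns and using the old-style select() signature this codebase relies on::

  from sqlalchemy import (MetaData, Table, Column, Integer, DECIMAL,
                          create_engine, select, func, and_)

  inf = float('inf')
  metadata = MetaData()
  versions = Table('versions', metadata,
                   Column('serial', Integer, primary_key=True),
                   Column('node', Integer),
                   Column('mtime', DECIMAL),
                   Column('cluster', Integer))

  def latest_version(node, before=inf, except_cluster=0):
      # Scalar subquery for the newest serial of this node.
      filtered = select([func.max(versions.c.serial)], versions.c.node == node)
      if before != inf:
          filtered = filtered.where(versions.c.mtime < before)
      return select([versions]).where(
          and_(versions.c.cluster != except_cluster,
               versions.c.serial == filtered))

  engine = create_engine('sqlite://')
  metadata.create_all(engine)
  print engine.execute(latest_version(node=1)).fetchall()  # [] on an empty table
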
@@ -467,11 +470,11 @@ class Node(DBWorker):
         s = select([func.count(v.c.serial),
                     func.sum(v.c.size),
                     func.max(v.c.mtime)])
-        c1 = select([func.max(self.versions.c.serial)],
-            and_(self.versions.c.node == v.c.node,
-                 self.versions.c.mtime < before))
+        c1 = select([func.max(self.versions.c.serial)])
+        if before != inf:
+            c1 = c1.where(self.versions.c.mtime < before)
         c2 = select([self.nodes.c.node], self.nodes.c.parent == node)
-        s = s.where(and_(v.c.serial == c1,
+        s = s.where(and_(v.c.serial == c1.where(self.versions.c.node == v.c.node),
                          v.c.cluster != except_cluster,
                          v.c.node.in_(c2)))
         rp = self.conn.execute(s)
@@ -490,8 +493,9 @@ class Node(DBWorker):
                     func.sum(v.c.size),
                     func.max(v.c.mtime)])
         c1 = select([func.max(self.versions.c.serial)],
-            and_(self.versions.c.node == v.c.node,
-                 self.versions.c.mtime < before))
+            self.versions.c.node == v.c.node)
+        if before != inf:
+            c1 = c1.where(self.versions.c.mtime < before)
         c2 = select([self.nodes.c.node], self.nodes.c.path.like(path + '%'))
         s = s.where(and_(v.c.serial == c1,
                          v.c.cluster != except_cluster,
@@ -528,8 +532,9 @@ class Node(DBWorker):
         s = select([v.c.serial, v.c.node, v.c.hash, v.c.size,
                     v.c.source, v.c.mtime, v.c.muser, v.c.cluster])
         c = select([func.max(self.versions.c.serial)],
-            and_(self.versions.c.node == node,
-                 self.versions.c.mtime < before))
+            self.versions.c.node == node)
+        if before != inf:
+            c = c.where(self.versions.c.mtime < before)
         s = s.where(and_(v.c.serial == c,
                          v.c.cluster == cluster))
         r = self.conn.execute(s)
@@ -682,9 +687,10 @@ class Node(DBWorker):
         v = self.versions.alias('v')
         n = self.nodes.alias('n')
         s = select([a.c.key]).distinct()
-        s = s.where(v.c.serial == select([func.max(self.versions.c.serial)],
-                                          and_(self.versions.c.node == v.c.node,
-                                               self.versions.c.mtime < before)))
+        filtered = select([func.max(self.versions.c.serial)])
+        if before != inf:
+            filtered = filtered.where(self.versions.c.mtime < before)
+        s = s.where(v.c.serial == filtered.where(self.versions.c.node == v.c.node))
         s = s.where(v.c.cluster != except_cluster)
         s = s.where(v.c.node.in_(select([self.nodes.c.node],
             self.nodes.c.parent == parent)))
@@ -757,9 +763,10 @@ class Node(DBWorker):
         v = self.versions.alias('v')
         n = self.nodes.alias('n')
         s = select([n.c.path, v.c.serial]).distinct()
-        s = s.where(v.c.serial == select([func.max(self.versions.c.serial)],
-            and_(self.versions.c.node == v.c.node,
-                 self.versions.c.mtime < before)))
+        filtered = select([func.max(self.versions.c.serial)])
+        if before != inf:
+            filtered = filtered.where(self.versions.c.mtime < before)
+        s = s.where(v.c.serial == filtered.where(self.versions.c.node == v.c.node))
         s = s.where(v.c.cluster != except_cluster)
         s = s.where(v.c.node.in_(select([self.nodes.c.node],
             self.nodes.c.parent == parent)))
diff --git a/pithos/backends/lib/sqlalchemy/policy.py b/pithos/backends/lib/sqlalchemy/policy.py
index cfab422..5513f4c 100644
@@ -44,8 +44,8 @@ class Policy(DBWorker):
         DBWorker.__init__(self, **params)
         metadata = MetaData()
         columns=[]
-        columns.append(Column('path', String(2048), primary_key=True))
-        columns.append(Column('key', String(255), primary_key=True))
+        columns.append(Column('path', String(2048), index=True))
+        columns.append(Column('key', String(255)))
         columns.append(Column('value', String(255)))
         self.policies = Table('policy', metadata, *columns)
         metadata.create_all(self.engine)
diff --git a/pithos/backends/lib/sqlalchemy/public.py b/pithos/backends/lib/sqlalchemy/public.py
index 4b2327e..ed09ca1 100644
@@ -42,7 +42,7 @@ class Public(DBWorker):
         DBWorker.__init__(self, **params)
         metadata = MetaData()
         columns=[]
-        columns.append(Column('path', String(2048), primary_key=True))
+        columns.append(Column('path', String(2048), index=True))
         self.public = Table('public', metadata, *columns)
         metadata.create_all(self.engine)
     
diff --git a/pithos/middleware/auth.py b/pithos/middleware/auth.py
index c9aa8f4..20eb393 100644
@@ -63,3 +63,4 @@ class AuthMiddleware(object):
         
         request.user_obj = user
         request.user = user.uniq
+        request.quota = user.quota