Revision 0534576c

b/snf-common/synnefo/settings/test.py
32 32
if SNF_TEST_USE_POSTGRES:
    DATABASES['default'] = {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'synnefo_db',
        # Django uses TEST_NAME (when set) as the database name while the
        # test suite runs.
        'TEST_NAME': 'test_synnefo_db',
        'USER': 'postgres',
        'PORT': '5432',
    }
    # Derive the pithos backend connection string from the same settings so
    # the two cannot drift apart.
    PITHOS_BACKEND_DB_CONNECTION = (
        'postgresql://%(USER)s@:%(PORT)s/%(TEST_NAME)s' % DATABASES['default'])
elif SNF_TEST_PITHOS_SQLITE_MODULE:
    BACKEND_DB_MODULE = 'pithos.backends.lib.sqlite'
    PITHOS_BACKEND_POOL_ENABLED = False
    PITHOS_BACKEND_DB_MODULE = 'pithos.backends.lib.sqlite'
    # For sqlite the connection string is simply the database file path.
    PITHOS_BACKEND_DB_CONNECTION = DATABASES['default']['NAME']
44 46

  
45 47
if SNF_TEST_PITHOS_UPDATE_MD5:
b/snf-pithos-app/pithos/api/test/__init__.py
47 47

  
48 48
from django.test import TestCase
49 49
from django.utils.http import urlencode
50
from django.conf import settings
51 50

  
52 51
import django.utils.simplejson as json
53 52

  
......
91 90

  
92 91
class PithosAPITest(TestCase):
93 92
    def setUp(self):
94
        pithos_settings.BACKEND_DB_MODULE = 'pithos.backends.lib.sqlalchemy'
95
        pithos_settings.BACKEND_DB_CONNECTION = django_to_sqlalchemy()
96
        pithos_settings.BACKEND_POOL_SIZE = 1
97

  
98 93
        # Override default block size to spead up tests
99 94
        pithos_settings.BACKEND_BLOCK_SIZE = TEST_BLOCK_SIZE
100 95
        pithos_settings.BACKEND_HASH_ALGORITHM = TEST_HASH_ALGORITHM
......
458 453
        uuid = map['x-object-uuid']
459 454
        assert(uuid == self.uuid)
460 455

  
461

  
462
# Map Django database ENGINE settings to the corresponding SQLAlchemy
# dialect (the scheme part of an SQLAlchemy URL).  The previous table mapped
# mysql to '' and sqlite3 to 'mssql', which produced broken connection
# strings; the correct dialect names are 'mysql' and 'sqlite'.
django_sqlalchemy_engines = {
    'django.db.backends.postgresql_psycopg2': 'postgresql+psycopg2',
    'django.db.backends.postgresql': 'postgresql',
    'django.db.backends.mysql': 'mysql',
    'django.db.backends.sqlite3': 'sqlite',
    'django.db.backends.oracle': 'oracle'}
468

  
469

  
470
def django_to_sqlalchemy():
    """Convert the django default database to sqlalchemy connection string"""
    # TODO support for more complex configuration
    db = settings.DATABASES['default']
    # Tests run against the test database: honour TEST_NAME when present,
    # otherwise fall back to Django's default "test_<NAME>" convention.
    name = db.get('TEST_NAME', 'test_%s' % db['NAME'])
    if db['ENGINE'] == 'django.db.backends.sqlite3':
        # (A previous, no-effect `db.get(...)` statement here was removed.)
        return 'sqlite:///%s' % name
    else:
        d = dict(scheme=django_sqlalchemy_engines.get(db['ENGINE']),
                 user=db['USER'],
                 pwd=db['PASSWORD'],
                 # NOTE(review): hostnames are case-insensitive, so
                 # normalizing with lower() is harmless.
                 host=db['HOST'].lower(),
                 # An empty PORT yields "host:", which SQLAlchemy treats as
                 # "use the default port".  The int() round-trip was dropped:
                 # the value is interpolated straight back into a string.
                 port=db['PORT'],
                 name=name)
        return '%(scheme)s://%(user)s:%(pwd)s@%(host)s:%(port)s/%(name)s' % d
486

  
487

  
488 456
def test_concurrently(times=2):
489 457
    """
490 458
    Add this decorator to small pieces of code that you want to test
b/snf-pithos-backend/pithos/backends/lib/sqlite/node.py
173 173
                    on versions(uuid) """)
174 174

  
175 175
        execute(""" create table if not exists attributes
176
                          ( serial integer,
177
                            domain text,
178
                            key    text,
179
                            value  text,
176
                          ( serial      integer,
177
                            domain      text,
178
                            key         text,
179
                            value       text,
180
                            node        integer not null    default 0,
181
                            is_latest   boolean not null    default 1,
180 182
                            primary key (serial, domain, key)
181 183
                            foreign key (serial)
182 184
                            references versions(serial)
......
184 186
                            on delete cascade ) """)
185 187
        execute(""" create index if not exists idx_attributes_domain
186 188
                    on attributes(domain) """)
189
        execute(""" create index if not exists idx_attributes_serial_node
190
                    on attributes(serial, node) """)
187 191

  
188 192
        wrapper = self.wrapper
189 193
        wrapper.execute()
......
203 207
        props = (parent, path)
204 208
        return self.execute(q, props).lastrowid
205 209

  
206
    def node_lookup(self, path, **kwargs):
210
    def node_lookup(self, path, for_update=False):
207 211
        """Lookup the current node of the given path.
208 212
           Return None if the path is not found.
209 213

  
......
267 271
            return 0
268 272
        return r[0]
269 273

  
270
    def node_purge_children(self, parent, before=inf, cluster=0):
274
    def node_purge_children(self, parent, before=inf, cluster=0,
275
                            update_statistics_ancestors_depth=None):
271 276
        """Delete all versions with the specified
272 277
           parent and cluster, and return
273 278
           the hashes, the size and the serials of versions deleted.
......
288 293
            return (), 0, ()
289 294
        mtime = time()
290 295
        self.statistics_update(parent, -nr, -size, mtime, cluster)
291
        self.statistics_update_ancestors(parent, -nr, -size, mtime, cluster)
296
        self.statistics_update_ancestors(parent, -nr, -size, mtime, cluster,
297
                                         update_statistics_ancestors_depth)
292 298

  
293 299
        q = ("select hash, serial from versions "
294 300
             "where node in (select node "
......
302 308
        for r in self.fetchall():
303 309
            hashes += [r[0]]
304 310
            serials += [r[1]]
305
        
311

  
306 312
        q = ("delete from versions "
307 313
             "where node in (select node "
308 314
             "from nodes "
......
319 325
        execute(q, (parent,))
320 326
        return hashes, size, serials
321 327

  
322
    def node_purge(self, node, before=inf, cluster=0):
328
    def node_purge(self, node, before=inf, cluster=0,
329
                   update_statistics_ancestors_depth=None):
323 330
        """Delete all versions with the specified
324 331
           node and cluster, and return
325 332
           the hashes, the size and the serials of versions deleted.
......
337 344
        if not nr:
338 345
            return (), 0, ()
339 346
        mtime = time()
340
        self.statistics_update_ancestors(node, -nr, -size, mtime, cluster)
347
        self.statistics_update_ancestors(node, -nr, -size, mtime, cluster,
348
                                         update_statistics_ancestors_depth)
341 349

  
342 350
        q = ("select hash, serial from versions "
343 351
             "where node = ? "
......
364 372
        execute(q, (node,))
365 373
        return hashes, size, serials
366 374

  
367
    def node_remove(self, node):
375
    def node_remove(self, node, update_statistics_ancestors_depth=None):
368 376
        """Remove the node specified.
369 377
           Return false if the node has children or is not found.
370 378
        """
......
380 388
        self.execute(q, (node,))
381 389
        for population, size, cluster in self.fetchall():
382 390
            self.statistics_update_ancestors(
383
                node, -population, -size, mtime, cluster)
391
                node, -population, -size, mtime, cluster,
392
                update_statistics_ancestors_depth)
384 393

  
385 394
        q = "delete from nodes where node = ?"
386 395
        self.execute(q, (node,))
......
458 467
        size += presize
459 468
        self.execute(qu, (node, population, size, mtime, cluster))
460 469

  
461
    def statistics_update_ancestors(self, node, population, size, mtime, cluster=0):
470
    def statistics_update_ancestors(self, node, population, size, mtime,
471
                                    cluster=0, recursion_depth=None):
462 472
        """Update the statistics of the given node's parent.
463 473
           Then recursively update all parents up to the root.
464 474
           Population is not recursive.
465 475
        """
466 476

  
477
        i = 0
467 478
        while True:
468
            if node == 0:
479
            if node == ROOTNODE:
480
                break
481
            if recursion_depth and recursion_depth <= i:
469 482
                break
470 483
            props = self.node_get_properties(node)
471 484
            if props is None:
......
474 487
            self.statistics_update(parent, population, size, mtime, cluster)
475 488
            node = parent
476 489
            population = 0  # Population isn't recursive
490
            i += 1
477 491

  
478 492
    def statistics_latest(self, node, before=inf, except_cluster=0):
479 493
        """Return population, total size and last mtime
......
547 561
        props = (serial, node)
548 562
        self.execute(q, props)
549 563

  
550
    def version_create(self, node, hash, size, type, source, muser, uuid, checksum, cluster=0):
564
    def version_create(self, node, hash, size, type, source, muser, uuid,
565
                       checksum, cluster=0,
566
                       update_statistics_ancestors_depth=None):
551 567
        """Create a new version from the given properties.
552 568
           Return the (serial, mtime) of the new version.
553 569
        """
......
558 574
        props = (node, hash, size, type, source, mtime, muser,
559 575
                 uuid, checksum, cluster)
560 576
        serial = self.execute(q, props).lastrowid
561
        self.statistics_update_ancestors(node, 1, size, mtime, cluster)
577
        self.statistics_update_ancestors(node, 1, size, mtime, cluster,
578
                                         update_statistics_ancestors_depth)
562 579

  
563 580
        self.nodes_set_latest_version(node, serial)
564 581

  
......
639 656
        q = "update versions set %s = ? where serial = ?" % key
640 657
        self.execute(q, (value, serial))
641 658

  
642
    def version_recluster(self, serial, cluster):
659
    def version_recluster(self, serial, cluster,
660
                          update_statistics_ancestors_depth=None):
643 661
        """Move the version into another cluster."""
644 662

  
645 663
        props = self.version_get_properties(serial)
......
652 670
            return
653 671

  
654 672
        mtime = time()
655
        self.statistics_update_ancestors(node, -1, -size, mtime, oldcluster)
656
        self.statistics_update_ancestors(node, 1, size, mtime, cluster)
673
        self.statistics_update_ancestors(node, -1, -size, mtime, oldcluster,
674
                                         update_statistics_ancestors_depth)
675
        self.statistics_update_ancestors(node, 1, size, mtime, cluster,
676
                                         update_statistics_ancestors_depth)
657 677

  
658 678
        q = "update versions set cluster = ? where serial = ?"
659 679
        self.execute(q, (cluster, serial))
660 680

  
661
    def version_remove(self, serial):
681
    def version_remove(self, serial, update_statistics_ancestors_depth=None):
662 682
        """Remove the serial specified."""
663 683

  
664 684
        props = self.version_get_properties(serial)
......
670 690
        cluster = props[CLUSTER]
671 691

  
672 692
        mtime = time()
673
        self.statistics_update_ancestors(node, -1, -size, mtime, cluster)
693
        self.statistics_update_ancestors(node, -1, -size, mtime, cluster,
694
                                         update_statistics_ancestors_depth)
674 695

  
675 696
        q = "delete from versions where serial = ?"
676 697
        self.execute(q, (serial,))
......
697 718
            execute(q, (serial, domain))
698 719
        return self.fetchall()
699 720

  
700
    def attribute_set(self, serial, domain, node, items, is_latest=True):
        """Set the attributes of the version specified by serial.
           Receive attributes as an iterable of (key, value) pairs.

           node is stored denormalized on every attribute row so latest
           lookups can filter on it without joining versions; is_latest
           marks rows belonging to the node's current version.
        """

        q = ("insert or replace into attributes "
             "(serial, domain, node, is_latest, key, value) "
             "values (?, ?, ?, ?, ?, ?)")
        self.executemany(q, ((serial, domain, node, is_latest, k, v)
                             for k, v in items))
708 731

  
709 732
    def attribute_del(self, serial, domain, keys=()):
710 733
        """Delete attributes of the version specified by serial.
......
721 744

  
722 745
    def attribute_copy(self, source, dest):
        """Copy all attributes of version source onto version dest.

        The insert names its columns explicitly: the table declares its
        columns as (serial, domain, key, value, node, is_latest), so a bare
        positional INSERT ... SELECT emitting (?, domain, node, is_latest,
        key, value) would silently write key/value into node/is_latest.
        """
        q = ("insert or replace into attributes "
             "(serial, domain, node, is_latest, key, value) "
             "select ?, domain, node, is_latest, key, value from attributes "
             "where serial = ?")
        self.execute(q, (dest, source))
727 750

  
751
    def attribute_unset_is_latest(self, node, exclude):
        """Clear the is_latest flag on every attribute row of the given
        node, except for rows belonging to version exclude.
        """
        query = ("update attributes set is_latest = 0 "
                 "where node = ? and serial != ?")
        self.execute(query, (node, exclude))
755

  
728 756
    def _construct_filters(self, domain, filterq):
729 757
        if not domain or not filterq:
730 758
            return None, None
......
1048 1076
             "v.size, v.type, v.source, v.mtime, v.muser, "
1049 1077
             "v.uuid, v.checksum, v.cluster, a.key, a.value "
1050 1078
             "from nodes n, versions v, attributes a "
1051
             "where n.node = v.node and "
1052
             "n.latest_version = v.serial and "
1053
             "v.serial = a.serial and "
1079
             "where v.serial = a.serial and "
1054 1080
             "a.domain = ? and "
1081
             "a.node = n.node and "
1082
             "a.is_latest = 1 and "
1055 1083
             "n.path in (%s)") % ','.join('?' for _ in paths)
1056 1084
        args = [domain]
1057 1085
        map(args.append, paths)
b/snf-pithos-backend/pithos/backends/lib/sqlite/public.py
74 74
                public_url_security, public_url_alphabet
75 75
            )
76 76
            q = "insert into public(path, active, url) values(?, 1, ?)"
77
            self.execute(q, (path, url))
78
            if sqlite3_changes() != 0:
77
            r = self.execute(q, (path, url))
78
            if r.rowcount != 0:
79 79
                logger.info('Public url set for path: %s' % path)
80 80

  
81 81
    def public_unset(self, path):

Also available in: Unified diff