root / snf-pithos-backend / pithos / backends / lib / sqlalchemy / alembic / versions / 54dbdde2d187_split_plankton_prope.py @ 73c37386
History | View | Annotate | Download (2.7 kB)
1 |
"""Split plankton properties
|
---|---|
2 |
|
3 |
Revision ID: 54dbdde2d187
|
4 |
Revises: 3b62b3f1bf6c
|
5 |
Create Date: 2014-01-21 11:44:34.783895
|
6 |
|
7 |
"""
|
8 |
|
9 |
# revision identifiers, used by Alembic.
|
10 |
revision = '54dbdde2d187'
|
11 |
down_revision = '3b62b3f1bf6c'
|
12 |
|
13 |
from alembic import op |
14 |
import sqlalchemy as sa |
15 |
|
16 |
import json |
17 |
|
18 |
from collections import defaultdict |
19 |
|
20 |
|
21 |
def upgrade():
    """Split aggregated plankton properties into individual rows.

    Each 'attributes' row whose key is 'plankton:properties' holds a JSON
    object of properties; this migration replaces every such row with one
    row per property, keyed 'plankton:property:<name>', and then removes
    the aggregated originals.
    """
    conn = op.get_bind()
    attrs = sa.sql.table(
        'attributes',
        sa.sql.column('serial', sa.Integer),
        sa.sql.column('domain', sa.String),
        sa.sql.column('key', sa.String),
        sa.sql.column('value', sa.String),
        sa.sql.column('node', sa.Integer),
        sa.sql.column('is_latest', sa.Boolean))

    sel = sa.select([attrs.c.serial,
                     attrs.c.domain,
                     attrs.c.key,
                     attrs.c.value,
                     attrs.c.node,
                     attrs.c.is_latest])
    cond = sa.sql.and_(attrs.c.domain == 'plankton',
                       attrs.c.key == 'plankton:properties')
    rows = conn.execute(sel.where(cond)).fetchall()
    if not rows:
        return

    # Fan each aggregated JSON row out into one row per property,
    # copying serial/domain/node/is_latest from the original row.
    new_rows = []
    for row in rows:
        base = dict(row.items())
        for name, val in json.loads(row['value']).items():
            entry = base.copy()
            entry.update({'key': 'plankton:property:%s' % name,
                          'value': val})
            new_rows.append(entry)

    # Insert the split rows first, then drop the aggregated originals.
    op.bulk_insert(attrs, new_rows)

    op.execute(attrs.delete().where(cond))
59 |
|
60 |
|
61 |
def downgrade():
    """Re-aggregate split plankton properties into single JSON rows.

    Collects every 'attributes' row whose key matches
    'plankton:property:%', groups them by (serial, domain, node,
    is_latest), writes one 'plankton:properties' row per group with the
    properties JSON-encoded in 'value', and deletes the per-property rows.
    """
    conn = op.get_bind()
    attrs = sa.sql.table(
        'attributes',
        sa.sql.column('serial', sa.Integer),
        sa.sql.column('domain', sa.String),
        sa.sql.column('key', sa.String),
        sa.sql.column('value', sa.String),
        sa.sql.column('node', sa.Integer),
        sa.sql.column('is_latest', sa.Boolean))

    sel = sa.select([attrs.c.serial,
                     attrs.c.domain,
                     attrs.c.key,
                     attrs.c.value,
                     attrs.c.node,
                     attrs.c.is_latest])
    cond = sa.sql.and_(attrs.c.domain == 'plankton',
                       attrs.c.key.like('plankton:property:%'))
    rows = conn.execute(sel.where(cond)).fetchall()
    if not rows:
        return

    # Group property values by the identifying columns of the owning row.
    grouped = defaultdict(dict)
    for row in rows:
        prop_name = row.key.replace('plankton:property:', '', 1)
        grouped[(row.serial, row.domain, row.node, row.is_latest)][prop_name] = row.value

    new_rows = [{'serial': serial,
                 'domain': domain,
                 'node': node,
                 'is_latest': is_latest,
                 'key': 'plankton:properties',
                 'value': json.dumps(props)}
                for (serial, domain, node, is_latest), props in grouped.items()]

    op.bulk_insert(attrs, new_rows)

    op.execute(attrs.delete().where(cond))