from pithos.backends.modular import ModularBackend
from lib.hashmap import HashMap
-
-
-class Migration(object):
- def __init__(self, db):
- self.engine = create_engine(db)
- self.metadata = MetaData(self.engine)
- #self.engine.echo = True
- self.conn = self.engine.connect()
-
- options = getattr(settings, 'BACKEND', None)[1]
- self.backend = ModularBackend(*options)
-
- def execute(self):
- pass
+from lib.migrate import Migration
class DataMigration(Migration):
- def __init__(self, db):
- Migration.__init__(self, db)
+ def __init__(self, pithosdb, db):
+ Migration.__init__(self, pithosdb)
# XXX Need more columns for primary key - last modified timestamp...
+ engine = create_engine(db)
+ metadata = MetaData(engine)
+
columns=[]
columns.append(Column('path', String(2048), primary_key=True))
columns.append(Column('hash', String(255)))
- self.files = Table('files', self.metadata, *columns)
- self.metadata.create_all(self.engine)
+ self.files = Table('files', metadata, *columns)
+ metadata.create_all(engine)
def cache_put(self, path, hash):
# Insert or replace.
blockhash = self.backend.hash_algorithm
# Loop for all available files.
- for path in ['README', 'store', 'test']:
+ filebody = Table('filebody', self.metadata, autoload=True)
+ s = select([filebody.c.storedfilepath])
+ rp = self.conn.execute(s)
+ paths = rp.fetchall()
+ rp.close()
+
+ for path in paths:
map = HashMap(blocksize, blockhash)
map.load(path)
hash = hexlify(map.hash())
else:
status = '[-] ' + path
print status
if __name__ == "__main__":
    # Connection string for the source Pithos database.
    # NOTE(review): left empty in the original — presumably supplied by
    # the operator or project settings before running; confirm.
    pithosdb = ''
    # Local SQLite database holding the migration cache table.
    db = 'sqlite:///migrate.db'
    dt = DataMigration(pithosdb, db)
    dt.execute()