
root / snf-pithos-backend / pithos / backends / lib / hashfiler / fileblocker.py @ c30635bf


# Copyright 2011-2012 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
#
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.

from os import makedirs
from os.path import isdir, realpath, exists, join
from hashlib import new as newhasher
from binascii import hexlify

from context_file import ContextFile, file_sync_read_chunks


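# FileBlocker stores fixed-size, content-addressed blocks as individual
# files under blockpath, named by the hex digest of their (NUL-stripped)
# content and fanned out over a three-level directory tree.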
class FileBlocker(object):
    """Blocker.
       Required constructor parameters: blocksize, blockpath, hashtype.
    """

    blocksize = None
    blockpath = None
    hashtype = None

    def __init__(self, **params):
        blocksize = params['blocksize']
        blockpath = params['blockpath']
        blockpath = realpath(blockpath)
        if not isdir(blockpath):
            if not exists(blockpath):
                makedirs(blockpath)
            else:
                raise ValueError("Variable blockpath '%s' is not a directory" % (blockpath,))

        hashtype = params['hashtype']
        try:
            hasher = newhasher(hashtype)
        except ValueError:
            msg = "Variable hashtype '%s' is not available from hashlib"
            raise ValueError(msg % (hashtype,))

        # The digest of the empty string identifies all-zero blocks,
        # since block_hash() strips trailing NULs before hashing.
        hasher.update("")
        emptyhash = hasher.digest()

        self.blocksize = blocksize
        self.blockpath = blockpath
        self.hashtype = hashtype
        self.hashlen = len(emptyhash)
        self.emptyhash = emptyhash

    def _pad(self, block):
        # Pad a block with NUL bytes up to the configured blocksize.
        return block + ('\x00' * (self.blocksize - len(block)))

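    # The two helpers below map a block hash to its backing file via a
    # three-level fan-out. Illustration with a hypothetical digest: a block
    # whose hex digest is 'abcdef0123...' lives at
    # <blockpath>/ab/cd/ef/abcdef0123...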
    def _get_rear_block(self, blkhash, create=0):
        # Open the file backing a block, creating the fan-out directories
        # (and, if create is set, the file itself) as needed.
        filename = hexlify(blkhash)
        dir = join(self.blockpath, filename[0:2], filename[2:4], filename[4:6])
        if not exists(dir):
            makedirs(dir)
        name = join(dir, filename)
        return ContextFile(name, create)

    def _check_rear_block(self, blkhash):
        # Report whether the file backing a block already exists.
        filename = hexlify(blkhash)
        dir = join(self.blockpath, filename[0:2], filename[2:4], filename[4:6])
        name = join(dir, filename)
        return exists(name)

    def block_hash(self, data):
        """Hash a block of data"""
        hasher = newhasher(self.hashtype)
        # Trailing NUL padding is stripped, so a short block and its
        # zero-padded form hash identically.
        hasher.update(data.rstrip('\x00'))
        return hasher.digest()

    def block_ping(self, hashes):
        """Check hashes for existence and
           return those missing from block storage.
        """
        notfound = []
        append = notfound.append

        for h in hashes:
            if h not in notfound and not self._check_rear_block(h):
                append(h)

        return notfound

    def block_retr(self, hashes):
        """Retrieve blocks from storage by their hashes."""
        blocksize = self.blocksize
        blocks = []
        append = blocks.append

        for h in hashes:
            if h == self.emptyhash:
                # The empty hash stands for an all-zero block; synthesize it.
                append(self._pad(''))
                continue
            block = None  # reset so a previous block cannot be reused
            with self._get_rear_block(h, 0) as rbl:
                if not rbl:
                    break
                for block in rbl.sync_read_chunks(blocksize, 1, 0):
                    break  # there should be just one block there
            if not block:
                break
            append(self._pad(block))

        return blocks

    def block_stor(self, blocklist):
        """Store a bunch of blocks and return (hashes, missing).
           Hashes is a list of the hashes of the blocks,
           missing is a list of indices in that list indicating
           which blocks were missing from the store.
        """
        block_hash = self.block_hash
        hashlist = [block_hash(b) for b in blocklist]
        missing = [i for i, h in enumerate(hashlist) if not self._check_rear_block(h)]
        for i in missing:
            with self._get_rear_block(hashlist[i], 1) as rbl:
                rbl.sync_write(blocklist[i])  # XXX: verify?

        return hashlist, missing

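    # block_delta illustration (hypothetical values): with blocksize 8,
    # patching block 'AAAAAAAA' at offset 2 with data 'xyz' yields
    # 'AAxyzAAA': bytes 2-4 are overwritten, the original tail is kept,
    # and the result is hashed and stored via block_stor.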
    def block_delta(self, blkhash, offset, data):
        """Construct and store a new block from a given block
           and a data 'patch' applied at offset. Return:
           (the hash of the new block,
            1 if the new block was missing and had to be stored, 0 otherwise)
        """

        blocksize = self.blocksize
        if offset >= blocksize or not data:
            return None, None

        block = self.block_retr((blkhash,))
        if not block:
            return None, None

        block = block[0]
        newblock = block[:offset] + data
        if len(newblock) > blocksize:
            newblock = newblock[:blocksize]
        elif len(newblock) < blocksize:
            newblock += block[len(newblock):]

        h, a = self.block_stor((newblock,))
        return h[0], 1 if a else 0

    def block_hash_file(self, openfile):
        """Return the list of hashes (hashes map)
           for the blocks in a buffered file.
           Helper method, does not affect store.
        """
        hashes = []
        append = hashes.append
        block_hash = self.block_hash

        for block in file_sync_read_chunks(openfile, self.blocksize, 1, 0):
            append(block_hash(block))

        return hashes

    def block_stor_file(self, openfile):
        """Read blocks from buffered file object and store them. Return:
           (bytes read, list of hashes,
            list with one entry per block that was missing and got stored)
        """
        blocksize = self.blocksize
        block_stor = self.block_stor
        hashlist = []
        hextend = hashlist.extend
        storedlist = []
        sextend = storedlist.extend
        lastsize = 0

        for block in file_sync_read_chunks(openfile, blocksize, 1, 0):
            hl, sl = block_stor((block,))
            hextend(hl)
            sextend(sl)
            lastsize = len(block)

        # Every block is full-sized except possibly the last one read.
        size = (len(hashlist) - 1) * blocksize + lastsize if hashlist else 0
        return size, hashlist, storedlist
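

# Usage sketch (illustrative only; parameter values below are examples,
# not defaults of this module):
#
#     blocker = FileBlocker(blocksize=4 * 1024 * 1024,
#                           blockpath='/srv/pithos/blocks',
#                           hashtype='sha256')
#     hashes, missing = blocker.block_stor(('hello world',))
#     assert blocker.block_ping(hashes) == []       # block is now present
#     block = blocker.block_retr(hashes)[0]         # padded to blocksize
#     assert block.rstrip('\x00') == 'hello world'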