Statistics
| Branch: | Tag: | Revision:

root / snf-pithos-backend / pithos / backends / lib / hashfiler / fileblocker.py @ c9b996b7

History | View | Annotate | Download (7.3 kB)

1
# Copyright 2011-2012 GRNET S.A. All rights reserved.
2
#
3
# Redistribution and use in source and binary forms, with or
4
# without modification, are permitted provided that the following
5
# conditions are met:
6
#
7
#   1. Redistributions of source code must retain the above
8
#      copyright notice, this list of conditions and the following
9
#      disclaimer.
10
#
11
#   2. Redistributions in binary form must reproduce the above
12
#      copyright notice, this list of conditions and the following
13
#      disclaimer in the documentation and/or other materials
14
#      provided with the distribution.
15
#
16
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
17
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
20
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
23
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
24
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
25
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
26
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27
# POSSIBILITY OF SUCH DAMAGE.
28
#
29
# The views and conclusions contained in the software and
30
# documentation are those of the authors and should not be
31
# interpreted as representing official policies, either expressed
32
# or implied, of GRNET S.A.
33

    
34
from os import makedirs
35
from os.path import isdir, realpath, exists, join
36
from hashlib import new as newhasher
37
from binascii import hexlify
38

    
39
from context_file import ContextFile, file_sync_read_chunks
40

    
41

    
42
class FileBlocker(object):
    """Content-addressed block store backed by the filesystem.

    Each block is stored as an individual file under ``blockpath``, named
    by the hex digest of its null-stripped content and fanned out into a
    three-level directory hierarchy to keep directories small.

    Required constructor parameters: blocksize, blockpath, hashtype.
    """

    blocksize = None   # size in bytes of one full (padded) block
    blockpath = None   # root directory of the block store
    hashtype = None    # hashlib algorithm name used for block hashes

    def __init__(self, **params):
        """Initialize the block store.

        Creates ``blockpath`` if it does not exist.
        Raises ValueError if ``blockpath`` exists but is not a directory,
        or if ``hashtype`` is not available from hashlib.
        """
        blocksize = params['blocksize']
        blockpath = realpath(params['blockpath'])
        if not isdir(blockpath):
            if not exists(blockpath):
                makedirs(blockpath)
            else:
                m = "Variable blockpath '%s' is not a directory" % (blockpath,)
                raise ValueError(m)

        hashtype = params['hashtype']
        try:
            hasher = newhasher(hashtype)
        except ValueError:
            msg = "Variable hashtype '%s' is not available from hashlib"
            raise ValueError(msg % (hashtype,))

        # The digest of the empty string doubles as the well-known hash
        # of an all-zero block, since block_hash strips trailing nulls.
        hasher.update("")
        emptyhash = hasher.digest()

        self.blocksize = blocksize
        self.blockpath = blockpath
        self.hashtype = hashtype
        self.hashlen = len(emptyhash)
        self.emptyhash = emptyhash

    def _pad(self, block):
        """Right-pad block with null bytes up to exactly blocksize."""
        return block + ('\x00' * (self.blocksize - len(block)))

    def _block_path(self, blkhash):
        """Return the storage path for the block with the given hash.

        Blocks are fanned out as <root>/xx/yy/zz/<fullhexdigest>, using
        the first three byte-pairs of the hex digest as subdirectories.
        """
        filename = hexlify(blkhash)
        return join(self.blockpath,
                    filename[0:2], filename[2:4], filename[4:6],
                    filename)

    def _get_rear_block(self, blkhash, create=False, write=False):
        """Open the backing file for blkhash as a ContextFile.

        NOTE(review): the 'write' parameter is accepted but deliberately
        ignored — blocks are content-addressed, so an existing block file
        must never be overwritten (see the note in block_stor).
        """
        return ContextFile(self._block_path(blkhash),
                           create=create, write=False)

    def _check_rear_block(self, blkhash):
        """Return True if a block with the given hash exists in storage."""
        return exists(self._block_path(blkhash))

    def block_hash(self, data):
        """Hash a block of data.

        Trailing null bytes are stripped before hashing, so a block and
        its zero-padded counterpart hash identically.
        """
        hasher = newhasher(self.hashtype)
        hasher.update(data.rstrip('\x00'))
        return hasher.digest()

    def block_ping(self, hashes):
        """Check hashes for existence and
           return those missing from block storage.
        """
        notfound = []
        append = notfound.append

        for h in hashes:
            # 'h not in notfound' de-duplicates repeated input hashes.
            if h not in notfound and not self._check_rear_block(h):
                append(h)

        return notfound

    def block_retr(self, hashes):
        """Retrieve blocks from storage by their hashes.

        Returns the padded blocks in input order. If a block is missing,
        the result list is silently truncated at the first miss (callers
        such as block_delta treat a short result as 'not found').
        """
        blocksize = self.blocksize
        blocks = []
        append = blocks.append

        for h in hashes:
            if h == self.emptyhash:
                # Well-known hash of the all-zero block: no file needed.
                append(self._pad(''))
                continue
            # Reset per iteration: otherwise an empty backing file would
            # silently re-append the previous iteration's stale block.
            block = None
            with self._get_rear_block(h, create=False, write=False) as rbl:
                if not rbl:
                    break
                for block in rbl.sync_read_chunks(blocksize, 1, 0):
                    break  # there should be just one block there
            if not block:
                break
            append(self._pad(block))

        return blocks

    def block_stor(self, blocklist):
        """Store a bunch of blocks and return (hashes, missing).
           Hashes is a list of the hashes of the blocks,
           missing is a list of indices in that list indicating
           which blocks were missing from the store.
        """
        block_hash = self.block_hash
        hashlist = [block_hash(b) for b in blocklist]
        missing = [i for i, h in enumerate(hashlist)
                   if not self._check_rear_block(h)]
        for i in missing:
            # write=False on purpose: do not overwrite if the block file
            # already exists (content-addressed storage is immutable).
            with self._get_rear_block(hashlist[i],
                                      create=True, write=False) as rbl:
                rbl.sync_write(blocklist[i])  # XXX: verify?

        return hashlist, missing

    def block_delta(self, blkhash, offset, data):
        """Construct and store a new block from a given block
           and a data 'patch' applied at offset. Return:
           (the hash of the new block, if the block already existed)

           Returns (None, None) if the patch falls outside the block or
           is empty, or if the source block cannot be retrieved.
        """
        blocksize = self.blocksize
        if offset >= blocksize or not data:
            return None, None

        block = self.block_retr((blkhash,))
        if not block:
            return None, None

        block = block[0]
        newblock = block[:offset] + data
        if len(newblock) > blocksize:
            # Patch runs past the end of the block: clip to blocksize.
            newblock = newblock[:blocksize]
        elif len(newblock) < blocksize:
            # Keep the original tail beyond the patched region.
            newblock += block[len(newblock):]

        h, a = self.block_stor((newblock,))
        return h[0], 1 if a else 0

    def block_hash_file(self, openfile):
        """Return the list of hashes (hashes map)
           for the blocks in a buffered file.
           Helper method, does not affect store.
        """
        hashes = []
        append = hashes.append
        block_hash = self.block_hash

        for block in file_sync_read_chunks(openfile, self.blocksize, 1, 0):
            append(block_hash(block))

        return hashes

    def block_stor_file(self, openfile):
        """Read blocks from buffered file object and store them. Return:
           (bytes read, list of hashes, list of hashes that were missing)
        """
        blocksize = self.blocksize
        block_stor = self.block_stor
        hashlist = []
        hextend = hashlist.extend
        storedlist = []
        sextend = storedlist.extend
        lastsize = 0

        for block in file_sync_read_chunks(openfile, blocksize, 1, 0):
            hl, sl = block_stor((block,))
            hextend(hl)
            sextend(sl)
            lastsize = len(block)

        # All blocks but the last are full-sized; the last may be short.
        size = (len(hashlist) - 1) * blocksize + lastsize if hashlist else 0
        return size, hashlist, storedlist