snf-pithos-backend/pithos/backends/lib/hashfiler/blocker.py @ 8c306eab


# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
#
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.

from os import makedirs
from os.path import isdir, realpath, exists, join
from hashlib import new as newhasher
from binascii import hexlify

from context_file import ContextFile, file_sync_read_chunks


class Blocker(object):
    """Blocker.
       Required constructor parameters: blocksize, blockpath, hashtype.
    """

    blocksize = None
    blockpath = None
    hashtype = None

    def __init__(self, **params):
        blocksize = params['blocksize']
        blockpath = params['blockpath']
        blockpath = realpath(blockpath)
        if not isdir(blockpath):
            if not exists(blockpath):
                makedirs(blockpath)
            else:
                raise ValueError("Variable blockpath '%s' is not a directory" % (blockpath,))

        hashtype = params['hashtype']
        try:
            hasher = newhasher(hashtype)
        except ValueError:
            msg = "Variable hashtype '%s' is not available from hashlib"
            raise ValueError(msg % (hashtype,))

        # The hash of the empty string doubles as the digest of an all-zero
        # block, since block_hash strips trailing NUL bytes before hashing.
        hasher.update("")
        emptyhash = hasher.digest()

        self.blocksize = blocksize
        self.blockpath = blockpath
        self.hashtype = hashtype
        self.hashlen = len(emptyhash)
        self.emptyhash = emptyhash

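    # Construction sketch (illustrative values, not mandated by the module):
    #   blocker = Blocker(blocksize=4 * 1024 * 1024,
    #                     blockpath='/srv/pithos/blocks',
    #                     hashtype='sha256')
    # Any algorithm name accepted by hashlib.new() may be used as hashtype.
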
    def _get_rear_block(self, blkhash, create=0):
        filename = hexlify(blkhash)
        dir = join(self.blockpath, filename[0:2], filename[2:4], filename[4:6])
        if not exists(dir):
            makedirs(dir)
        name = join(dir, filename)
        return ContextFile(name, create)

    def _check_rear_block(self, blkhash):
        filename = hexlify(blkhash)
        dir = join(self.blockpath, filename[0:2], filename[2:4], filename[4:6])
        name = join(dir, filename)
        return exists(name)

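    # Layout note (illustrative): blocks live in a three-level fan-out under
    # blockpath, keyed by the hex digest of the block hash, e.g. a block
    # whose digest starts with 'ab12cd' is stored at:
    #   <blockpath>/ab/12/cd/ab12cd...
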
    def block_hash(self, data):
        """Hash a block of data"""
        hasher = newhasher(self.hashtype)
        # Trailing NUL bytes are stripped, so a zero-padded block hashes the
        # same as its truncated form.
        hasher.update(data.rstrip('\x00'))
        return hasher.digest()

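    # Example (illustrative): given the padding rule above, for an instance b:
    #   b.block_hash('data' + '\x00' * 100) == b.block_hash('data')
    #   b.block_hash('\x00' * b.blocksize) == b.emptyhash
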
    def block_ping(self, hashes):
        """Check hashes for existence and
           return those missing from block storage.
        """
        notfound = []
        append = notfound.append

        for h in hashes:
            if h not in notfound and not self._check_rear_block(h):
                append(h)

        return notfound

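    # Example (illustrative): if only h1 is present in the store,
    #   block_ping([h1, h2, h2]) == [h2]
    # duplicate missing hashes are reported once.
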
    def block_retr(self, hashes):
        """Retrieve blocks from storage by their hashes."""
        blocksize = self.blocksize
        blocks = []
        append = blocks.append
        block = None

        for h in hashes:
            if h == self.emptyhash:
                append('')
                continue
            with self._get_rear_block(h, 0) as rbl:
                if not rbl:
                    break
                for block in rbl.sync_read_chunks(blocksize, 1, 0):
                    break  # there should be just one block there
            if not block:
                break
            append(block)

        return blocks

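    # Note (illustrative): blocks come back in request order; the empty-block
    # hash yields '' without touching disk, and retrieval stops at the first
    # hash that cannot be found, so the result may be shorter than the input.
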
    def block_stor(self, blocklist):
        """Store a bunch of blocks and return (hashes, missing).
           Hashes is a list of the hashes of the blocks,
           missing is a list of indices in that list indicating
           which blocks were missing from the store.
        """
        block_hash = self.block_hash
        hashlist = [block_hash(b) for b in blocklist]
        missing = [i for i, h in enumerate(hashlist) if not self._check_rear_block(h)]
        for i in missing:
            with self._get_rear_block(hashlist[i], 1) as rbl:
                rbl.sync_write(blocklist[i])  # XXX: verify?

        return hashlist, missing

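    # Example (illustrative): if 'bar' is already stored and 'foo' is not,
    #   block_stor(('foo', 'bar')) == ([hash_of_foo, hash_of_bar], [0])
    # i.e. only the block at index 0 had to be written.
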
    def block_delta(self, blkhash, offdata=()):
        """Construct and store a new block from a given block
           and a list of (offset, data) 'patches'. Return:
           (hash of the new block, 1 if the new block was missing from the store)
        """
        if not offdata:
            return None, None

        blocksize = self.blocksize
        block = self.block_retr((blkhash,))
        if not block:
            return None, None

        block = block[0]
        newblock = ''
        idx = 0
        size = 0
        trunc = 0
        for off, data in offdata:
            if not data:
                trunc = 1
                break
            newblock += block[idx:off] + data
            size += off - idx + len(data)
            if size >= blocksize:
                break
            idx = size  # advance the source cursor past the patched region

        if not trunc:
            newblock += block[size:len(block)]

        h, a = self.block_stor((newblock,))
        return h[0], 1 if a else 0

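    # Worked example (illustrative): for a stored 8-byte block 'AAAABBBB'
    # with hash h and a blocksize large enough to hold the result,
    #   block_delta(h, [(4, 'CCCC')])
    # builds and stores 'AAAACCCC' and returns (its hash, 1) the first time,
    # since the patched block was not yet in the store.
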
    def block_hash_file(self, openfile):
        """Return the list of hashes (hashes map)
           for the blocks in a buffered file.
           Helper method, does not affect store.
        """
        hashes = []
        append = hashes.append
        block_hash = self.block_hash

        for block in file_sync_read_chunks(openfile, self.blocksize, 1, 0):
            append(block_hash(block))

        return hashes

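    # Note (illustrative): this yields the same hashes that block_stor_file
    # would record for the same file, but writes nothing to the store.
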
    def block_stor_file(self, openfile):
        """Read blocks from buffered file object and store them. Return:
           (bytes read, list of hashes, list indicating which blocks were
           missing from the store)
        """
        blocksize = self.blocksize
        block_stor = self.block_stor
        hashlist = []
        hextend = hashlist.extend
        storedlist = []
        sextend = storedlist.extend
        lastsize = 0

        for block in file_sync_read_chunks(openfile, blocksize, 1, 0):
            hl, sl = block_stor((block,))
            hextend(hl)
            sextend(sl)
            lastsize = len(block)

        # All blocks but the last are exactly blocksize bytes long.
        size = (len(hashlist) - 1) * blocksize + lastsize if hashlist else 0
        return size, hashlist, storedlist
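

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only; not part of the module).
# It assumes a writable temporary directory, that 'sha256' is available in
# hashlib, and an arbitrary 4 MiB example blocksize.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    from tempfile import mkdtemp

    blocker = Blocker(blocksize=4 * 1024 * 1024,
                      blockpath=mkdtemp(prefix='blocker-demo-'),
                      hashtype='sha256')

    # Store two blocks; both are missing from the fresh store.
    hashes, missing = blocker.block_stor(('hello', 'world'))
    print('stored %d blocks, %d were missing' % (len(hashes), len(missing)))

    # Storing the same blocks again reports nothing missing.
    _, missing = blocker.block_stor(('hello', 'world'))
    print('second store, missing: %r' % (missing,))

    # Retrieve the blocks back in the order of their hashes.
    print('retrieved: %r' % (blocker.block_retr(hashes),))

    # Ask which of a set of hashes are absent from the store.
    absent = blocker.block_ping(hashes + [blocker.block_hash('unknown')])
    print('absent: %r' % ([hexlify(h) for h in absent],))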