# pithos/backends/lib/hashfiler/blocker.py
# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
#
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
# Filesystem / hashing helpers used by the block store.
from os import makedirs
from os.path import exists, isdir, join, realpath
from hashlib import new as newhasher
from binascii import hexlify

from context_file import ContextFile, file_sync_read_chunks

class Blocker(object):
    """Blocker.
       Required constructor parameters: blocksize, blockpath, hashtype.

       Stores fixed-size blocks as individual files under blockpath,
       each file named after the hex form of the block's hash.
    """

    blocksize = None
    blockpath = None
    hashtype = None

    def __init__(self, **params):
        """Initialize the store directory and the hash machinery.

           Raises ValueError if blockpath exists but is not a
           directory, or if hashtype is unknown to hashlib.
        """
        blocksize = params['blocksize']
        blockpath = params['blockpath']
        blockpath = realpath(blockpath)
        if not isdir(blockpath):
            if not exists(blockpath):
                makedirs(blockpath)
            else:
                raise ValueError("Variable blockpath '%s' is not a directory" % (blockpath,))

        hashtype = params['hashtype']
        try:
            hasher = newhasher(hashtype)
        except ValueError:
            msg = "Variable hashtype '%s' is not available from hashlib"
            raise ValueError(msg % (hashtype,))

        # The digest of the empty string doubles as a probe for the
        # digest length of the selected hash type.
        hasher.update("")
        emptyhash = hasher.digest()

        self.blocksize = blocksize
        self.blockpath = blockpath
        self.hashtype = hashtype
        self.hashlen = len(emptyhash)
        self.emptyhash = emptyhash

    def get_rear_block(self, blkhash, create=0):
        """Return a ContextFile for the block file named after blkhash.
           If create is true the file may be created on open
           (exact semantics delegated to ContextFile).
        """
        name = join(self.blockpath, hexlify(blkhash))
        return ContextFile(name, create)

    def check_rear_block(self, blkhash):
        """Return True if a block file for blkhash exists in the store."""
        name = join(self.blockpath, hexlify(blkhash))
        return exists(name)

    def block_hash(self, data):
        """Hash a block of data.

           Trailing NUL bytes are stripped first, so blocks that
           differ only in zero-padding hash to the same value.
        """
        hasher = newhasher(self.hashtype)
        hasher.update(data.rstrip('\x00'))
        return hasher.digest()

    def block_ping(self, hashes):
        """Check hashes for existence and
           return those missing from block storage.

           The returned list holds *indices* into hashes.
        """
        missing = []
        append = missing.append
        for i, h in enumerate(hashes):
            if not self.check_rear_block(h):
                append(i)
        return missing

    def block_retr(self, hashes):
        """Retrieve blocks from storage by their hashes.

           Stops at the first hash whose block is missing or empty;
           only the blocks successfully read up to that point are
           returned.
        """
        blocksize = self.blocksize
        blocks = []
        append = blocks.append

        for h in hashes:
            # BUGFIX: reset per iteration. Previously 'block' was set
            # once before the loop, so an existing-but-empty rear block
            # left the previous iteration's data in 'block' and the
            # stale block was appended again.
            block = None
            with self.get_rear_block(h, 0) as rbl:
                if not rbl:
                    break
                for block in rbl.sync_read_chunks(blocksize, 1, 0):
                    break # there should be just one block there
            if not block:
                break
            append(block)

        return blocks

    def block_stor(self, blocklist):
        """Store a bunch of blocks and return (hashes, missing).
           Hashes is a list of the hashes of the blocks,
           missing is a list of indices in that list indicating
           which blocks were missing from the store.
        """
        block_hash = self.block_hash
        hashlist = [block_hash(b) for b in blocklist]
        missing = self.block_ping(hashlist)
        for i in missing:
            with self.get_rear_block(hashlist[i], 1) as rbl:
                rbl.sync_write(blocklist[i]) #XXX: verify?

        return hashlist, missing

    def block_delta(self, blkhash, offdata=()):
        """Construct and store a new block from a given block
           and a list of (offset, data) 'patches'. Return:
           (the hash of the new block, if the block already existed)

           A patch with empty data truncates the new block at that
           point; otherwise the unpatched tail of the source block is
           appended.
        """
        if not offdata:
            return None, None

        blocksize = self.blocksize
        block = self.block_retr((blkhash,))
        if not block:
            return None, None

        block = block[0]
        newblock = ''
        idx = 0
        size = 0
        trunc = 0
        for off, data in offdata:
            if not data:
                trunc = 1
                break
            newblock += block[idx:off] + data
            size += off - idx + len(data)
            if size >= blocksize:
                break
            # BUGFIX: advance the read cursor into the source block.
            # The original assigned to 'off', which the for statement
            # immediately overwrites, so idx stayed 0 and every patch
            # after the first re-copied the block from its start.
            # idx == size holds inductively: both track the consumed
            # source position when patches are applied in order.
            idx = size

        if not trunc:
            newblock += block[size:len(block)]

        h, a = self.block_stor((newblock,))
        return h[0], 1 if a else 0

    def block_hash_file(self, openfile):
        """Return the list of hashes (hashes map)
           for the blocks in a buffered file.
           Helper method, does not affect store.
        """
        hashes = []
        append = hashes.append
        block_hash = self.block_hash

        for block in file_sync_read_chunks(openfile, self.blocksize, 1, 0):
            append(block_hash(block))

        return hashes

    def block_stor_file(self, openfile):
        """Read blocks from buffered file object and store them. Return:
           (bytes read, list of hashes, list of hashes that were missing)
        """
        blocksize = self.blocksize
        block_stor = self.block_stor
        hashlist = []
        hextend = hashlist.extend
        storedlist = []
        sextend = storedlist.extend
        lastsize = 0

        for block in file_sync_read_chunks(openfile, blocksize, 1, 0):
            hl, sl = block_stor((block,))
            hextend(hl)
            sextend(sl)
            lastsize = len(block)

        # All blocks but the last are exactly blocksize bytes.
        size = (len(hashlist) - 1) * blocksize + lastsize if hashlist else 0
        return size, hashlist, storedlist