/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to breezy/hashcache.py

  • Committer: Jelmer Vernooij
  • Date: 2018-02-18 21:42:57 UTC
  • mto: This revision was merged to the branch mainline in revision 6859.
  • Revision ID: jelmer@jelmer.uk-20180218214257-jpevutp1wa30tz3v
Update TODO to reference Breezy, not Bazaar.

Show diffs side-by-side

added

removed

Lines of Context:
14
14
# along with this program; if not, write to the Free Software
15
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
 
17
from __future__ import absolute_import
 
18
 
17
19
# TODO: Up-front, stat all files in order and remove those which are deleted or
18
20
# out-of-date.  Don't actually re-read them until they're needed.  That ought
19
21
# to bring all the inodes into core so that future stats to them are fast, and
26
28
# TODO: Perhaps use a Python pickle instead of a text file; might be faster.
27
29
 
28
30
 
 
31
 
29
32
CACHE_HEADER = b"### bzr hashcache v5\n"
30
33
 
31
34
import os
39
42
    osutils,
40
43
    trace,
41
44
    )
 
45
from .sixish import (
 
46
    text_type,
 
47
    viewitems,
 
48
    )
42
49
 
43
50
 
44
51
FP_MTIME_COLUMN = 1
46
53
FP_MODE_COLUMN = 5
47
54
 
48
55
 
 
56
 
49
57
class HashCache(object):
50
58
    """Cache for looking up file SHA-1.
51
59
 
83
91
    needs_write = False
84
92
 
85
93
    def __init__(self, root, cache_file_name, mode=None,
86
 
                 content_filter_stack_provider=None):
 
94
            content_filter_stack_provider=None):
87
95
        """Create a hash cache in base dir, and set the file mode to mode.
88
96
 
89
97
        :param content_filter_stack_provider: a function that takes a
91
99
            parameters and returns a stack of ContentFilters.
92
100
            If None, no content filtering is performed.
93
101
        """
94
 
        if not isinstance(root, str):
 
102
        if not isinstance(root, text_type):
95
103
            raise ValueError("Base dir for hashcache must be text")
96
104
        self.root = root
97
105
        self.hit_count = 0
125
133
        # Stat in inode order as optimisation for at least linux.
126
134
        def inode_order(path_and_cache):
127
135
            return path_and_cache[1][1][3]
128
 
        for path, cache_val in sorted(self._cache.items(), key=inode_order):
 
136
        for path, cache_val in sorted(viewitems(self._cache), key=inode_order):
129
137
            abspath = osutils.pathjoin(self.root, path)
130
138
            fp = self._fingerprint(abspath)
131
139
            self.stat_count += 1
157
165
            cache_sha1, cache_fp = None, None
158
166
 
159
167
        if cache_fp == file_fp:
 
168
            ## mutter("hashcache hit for %s %r -> %s", path, file_fp, cache_sha1)
 
169
            ## mutter("now = %s", time.time())
160
170
            self.hit_count += 1
161
171
            return cache_sha1
162
172
 
167
177
            if self._filter_provider is None:
168
178
                filters = []
169
179
            else:
170
 
                filters = self._filter_provider(path=path)
 
180
                filters = self._filter_provider(path=path, file_id=None)
171
181
            digest = self._really_sha1_file(abspath, filters)
172
182
        elif stat.S_ISLNK(mode):
173
183
            target = osutils.readlink(abspath)
199
209
                self.needs_write = True
200
210
                del self._cache[path]
201
211
        else:
202
 
            # mutter('%r added to cache: now=%f, mtime=%d, ctime=%d',
 
212
            ## mutter('%r added to cache: now=%f, mtime=%d, ctime=%d',
203
213
            ##        path, time.time(), file_fp[FP_MTIME_COLUMN],
204
 
            # file_fp[FP_CTIME_COLUMN])
 
214
            ##        file_fp[FP_CTIME_COLUMN])
205
215
            self.update_count += 1
206
216
            self.needs_write = True
207
217
            self._cache[path] = (digest, file_fp)
213
223
 
214
224
    def write(self):
215
225
        """Write contents of cache to file."""
216
 
        with atomicfile.AtomicFile(self.cache_file_name(), 'wb',
217
 
                                   new_mode=self._mode) as outf:
 
226
        outf = atomicfile.AtomicFile(self.cache_file_name(), 'wb',
 
227
                                     new_mode=self._mode)
 
228
        try:
218
229
            outf.write(CACHE_HEADER)
219
230
 
220
 
            for path, c in self._cache.items():
 
231
            for path, c  in viewitems(self._cache):
221
232
                line_info = [path.encode('utf-8'), b'// ', c[0], b' ']
222
233
                line_info.append(b'%d %d %d %d %d %d' % c[1])
223
234
                line_info.append(b'\n')
224
235
                outf.write(b''.join(line_info))
 
236
            outf.commit()
225
237
            self.needs_write = False
226
 
            # mutter("write hash cache: %s hits=%d misses=%d stat=%d recent=%d updates=%d",
227
 
            #        self.cache_file_name(), self.hit_count, self.miss_count,
228
 
            # self.stat_count,
229
 
            # self.danger_count, self.update_count)
 
238
            ## mutter("write hash cache: %s hits=%d misses=%d stat=%d recent=%d updates=%d",
 
239
            ##        self.cache_file_name(), self.hit_count, self.miss_count,
 
240
            ##        self.stat_count,
 
241
            ##        self.danger_count, self.update_count)
 
242
        finally:
 
243
            outf.close()
230
244
 
231
245
    def read(self):
232
246
        """Reinstate cache from file.
241
255
        try:
242
256
            inf = open(fn, 'rb', buffering=65000)
243
257
        except IOError as e:
244
 
            trace.mutter("failed to open %s: %s", fn, str(e))
 
258
            trace.mutter("failed to open %s: %s", fn, e)
245
259
            # better write it now so it is valid
246
260
            self.needs_write = True
247
261
            return