/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/hashcache.py

  • Committer: Jelmer Vernooij
  • Date: 2020-03-22 01:35:14 UTC
  • mfrom: (7490.7.6 work)
  • mto: This revision was merged to the branch mainline in revision 7499.
  • Revision ID: jelmer@jelmer.uk-20200322013514-7vw1ntwho04rcuj3
merge lp:brz/3.1.

=== modified file 'breezy/hashcache.py'
@@ -14,8 +14,6 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
-from __future__ import absolute_import
-
 # TODO: Up-front, stat all files in order and remove those which are deleted or
 # out-of-date.  Don't actually re-read them until they're needed.  That ought
 # to bring all the inodes into core so that future stats to them are fast, and
@@ -28,7 +26,6 @@
 # TODO: Perhaps use a Python pickle instead of a text file; might be faster.
 
 
-
 CACHE_HEADER = b"### bzr hashcache v5\n"
 
 import os
@@ -42,10 +39,6 @@
     osutils,
     trace,
     )
-from .sixish import (
-    text_type,
-    viewitems,
-    )
 
 
 FP_MTIME_COLUMN = 1
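
The dropped breezy.sixish shims map directly onto Python 3 builtins, which is exactly what the rest of this diff substitutes. A minimal illustrative sketch of the equivalences (the sample values are hypothetical, not part of the change):

    # text_type was the unicode string type; on Python 3 that is plain str.
    root = "/path/to/tree"
    assert isinstance(root, str)          # replaces isinstance(root, text_type)

    # viewitems(d) returned a dict view on both Python 2 and 3; on Python 3,
    # dict.items() already returns a view, so the shim is unnecessary.
    cache = {"README": ("<sha1>", (0, 0, 0, 0, 0, 0))}
    for path, entry in cache.items():     # replaces viewitems(cache)
        pass
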
@@ -53,7 +46,6 @@
 FP_MODE_COLUMN = 5
 
 
-
 class HashCache(object):
     """Cache for looking up file SHA-1.
 
@@ -91,7 +83,7 @@
     needs_write = False
 
     def __init__(self, root, cache_file_name, mode=None,
-            content_filter_stack_provider=None):
+                 content_filter_stack_provider=None):
         """Create a hash cache in base dir, and set the file mode to mode.
 
         :param content_filter_stack_provider: a function that takes a
@@ -99,7 +91,7 @@
             parameters and returns a stack of ContentFilters.
             If None, no content filtering is performed.
         """
-        if not isinstance(root, text_type):
+        if not isinstance(root, str):
             raise ValueError("Base dir for hashcache must be text")
         self.root = root
         self.hit_count = 0
@@ -133,7 +125,7 @@
         # Stat in inode order as optimisation for at least linux.
         def inode_order(path_and_cache):
             return path_and_cache[1][1][3]
-        for path, cache_val in sorted(viewitems(self._cache), key=inode_order):
+        for path, cache_val in sorted(self._cache.items(), key=inode_order):
             abspath = osutils.pathjoin(self.root, path)
             fp = self._fingerprint(abspath)
             self.stat_count += 1
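
The scan loop above sorts cache entries by inode before statting them, as an optimisation for at least Linux (per the comment). A standalone sketch of that ordering, assuming, as the inode_order key suggests, that each cache value is (sha1, fingerprint) and that index 3 of the fingerprint tuple is the inode number; the sample entries are hypothetical:

    # Hypothetical cache contents: path -> (sha1, fingerprint).
    cache = {
        "b.txt": ("sha-b", (10, 0, 0, 2002, 0, 0o100644)),
        "a.txt": ("sha-a", (10, 0, 0, 1001, 0, 0o100644)),
    }

    def inode_order(path_and_cache):
        # path_and_cache[1] is the cache value; [1][3] is the inode column.
        return path_and_cache[1][1][3]

    for path, cache_val in sorted(cache.items(), key=inode_order):
        print(path)   # prints a.txt (inode 1001) before b.txt (inode 2002)
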
@@ -165,8 +157,6 @@
             cache_sha1, cache_fp = None, None
 
         if cache_fp == file_fp:
-            ## mutter("hashcache hit for %s %r -> %s", path, file_fp, cache_sha1)
-            ## mutter("now = %s", time.time())
             self.hit_count += 1
             return cache_sha1
 
@@ -177,7 +167,7 @@
             if self._filter_provider is None:
                 filters = []
             else:
-                filters = self._filter_provider(path=path, file_id=None)
+                filters = self._filter_provider(path=path)
             digest = self._really_sha1_file(abspath, filters)
         elif stat.S_ISLNK(mode):
             target = osutils.readlink(abspath)
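
With file_id gone from the call, a content_filter_stack_provider only needs to accept the path. A hedged sketch of plugging a provider into HashCache under the new signature; the provider and the paths below are hypothetical, and returning an empty stack is equivalent to passing None (no filtering):

    from breezy.hashcache import HashCache

    def no_filters_provider(path):
        # Return a stack (list) of ContentFilters; [] means no filtering.
        return []

    hc = HashCache("/path/to/tree", "/path/to/stat-cache",
                   content_filter_stack_provider=no_filters_provider)
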
@@ -209,9 +199,9 @@
                 self.needs_write = True
                 del self._cache[path]
         else:
-            ## mutter('%r added to cache: now=%f, mtime=%d, ctime=%d',
+            # mutter('%r added to cache: now=%f, mtime=%d, ctime=%d',
             ##        path, time.time(), file_fp[FP_MTIME_COLUMN],
-            ##        file_fp[FP_CTIME_COLUMN])
+            # file_fp[FP_CTIME_COLUMN])
             self.update_count += 1
             self.needs_write = True
             self._cache[path] = (digest, file_fp)
@@ -223,24 +213,20 @@
 
     def write(self):
         """Write contents of cache to file."""
-        outf = atomicfile.AtomicFile(self.cache_file_name(), 'wb',
-                                     new_mode=self._mode)
-        try:
+        with atomicfile.AtomicFile(self.cache_file_name(), 'wb',
+                                   new_mode=self._mode) as outf:
             outf.write(CACHE_HEADER)
 
-            for path, c  in viewitems(self._cache):
+            for path, c in self._cache.items():
                 line_info = [path.encode('utf-8'), b'// ', c[0], b' ']
                 line_info.append(b'%d %d %d %d %d %d' % c[1])
                 line_info.append(b'\n')
                 outf.write(b''.join(line_info))
-            outf.commit()
             self.needs_write = False
-            ## mutter("write hash cache: %s hits=%d misses=%d stat=%d recent=%d updates=%d",
-            ##        self.cache_file_name(), self.hit_count, self.miss_count,
-            ##        self.stat_count,
-            ##        self.danger_count, self.update_count)
-        finally:
-            outf.close()
+            # mutter("write hash cache: %s hits=%d misses=%d stat=%d recent=%d updates=%d",
+            #        self.cache_file_name(), self.hit_count, self.miss_count,
+            # self.stat_count,
+            # self.danger_count, self.update_count)
 
     def read(self):
         """Reinstate cache from file.
@@ -255,7 +241,7 @@
         try:
             inf = open(fn, 'rb', buffering=65000)
         except IOError as e:
-            trace.mutter("failed to open %s: %s", fn, e)
+            trace.mutter("failed to open %s: %s", fn, str(e))
             # better write it now so it is valid
             self.needs_write = True
             return