# (C) 2005 Canonical Ltd

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import stat, os, sha, time
from binascii import b2a_qp, a2b_qp

from trace import mutter
from errors import BzrError   # raised by load_cache() below

"""File stat cache to speed up tree comparisons.
|
24 |
||
25 |
This module basically gives a quick way to find the SHA-1 and related
|
|
26 |
information of a file in the working directory, without actually
|
|
27 |
reading and hashing the whole file.
|
|
28 |
||
29 |
This is done by maintaining a cache indexed by a file fingerprint of
|
|
30 |
(path, size, mtime, ctime, ino, dev) pointing to the SHA-1. If the
|
|
31 |
fingerprint has changed, we assume the file content has not changed
|
|
32 |
either and the SHA-1 is therefore the same.
|
|
33 |
||
34 |
If any of the fingerprint fields have changed then the file content
|
|
35 |
*may* have changed, or it may not have. We need to reread the file
|
|
36 |
contents to make sure, but this is not visible to the user or
|
|
37 |
higher-level code (except as a delay of course).
|
|
38 |
||
39 |
The mtime and ctime are stored with nanosecond fields, but not all
|
|
40 |
filesystems give this level of precision. There is therefore a
|
|
41 |
possible race: the file might be modified twice within a second
|
|
42 |
without changing the size or mtime, and a SHA-1 cached from the first
|
|
43 |
version would be wrong. We handle this by not recording a cached hash
|
|
44 |
for any files which were modified in the current second and that
|
|
45 |
therefore have the chance to change again before the second is up.
|
|
46 |
||
47 |
The only known hole in this design is if the system clock jumps
|
|
48 |
backwards crossing invocations of bzr. Please don't do that; use ntp
|
|
49 |
to gradually adjust your clock or don't use bzr over the step.
|
|
50 |
||
51 |
At the moment this is stored in a simple textfile; it might be nice
|
|
52 |
to use a tdb instead.
|
|
53 |
"""


def fingerprint(path, abspath):
    try:
        fs = os.lstat(abspath)
    except OSError:
        # might be missing, etc
        return None

    if stat.S_ISDIR(fs.st_mode):
        return None

    return (fs.st_size, fs.st_mtime,
            fs.st_ctime, fs.st_ino, fs.st_dev)
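
# Illustrative sketch only (not used by this module): how a caller can use a
# fingerprint to decide whether a cached SHA-1 is still trustworthy, where
# `cacheentry` follows the tuple layout described above.
#
#     new_fp = fingerprint(path, abspath)
#     if new_fp is not None and cacheentry and cacheentry[3:] == new_fp:
#         sha1 = cacheentry[1]        # stat fields unchanged; reuse cached digest
#     else:
#         sha1 = sha.new(file(abspath, 'rb').read()).hexdigest()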


def write_cache(branch, entry_iter):
    outf = branch.controlfile('work-cache.tmp', 'wt')
    for entry in entry_iter:
        outf.write(entry[0] + ' ' + entry[1] + ' ')
        outf.write(b2a_qp(entry[2], True))
        outf.write(' %d %d %d %d %d\n' % entry[3:])

    outf.close()
    os.rename(branch.controlfilename('work-cache.tmp'),
              branch.controlfilename('work-cache'))
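
# Design note: the cache is written to 'work-cache.tmp' and then renamed over
# 'work-cache'. On POSIX systems rename() is atomic, so readers never see a
# partially written cache file even if bzr is interrupted mid-write.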


def load_cache(branch):
    cache = {}

    try:
        cachefile = branch.controlfile('work-cache', 'rt')
    except IOError:
        return cache

    for l in cachefile:
        f = l.split(' ')
        file_id = f[0]
        if file_id in cache:
            raise BzrError("duplicated file_id in cache: {%s}" % file_id)
        cache[file_id] = (f[0], f[1], a2b_qp(f[2])) + tuple([long(x) for x in f[3:]])
    return cache



def _files_from_inventory(inv):
    for path, ie in inv.iter_entries():
        if ie.kind != 'file':
            continue
        yield ie.file_id, path


def build_cache(branch):
    inv = branch.read_working_inventory()

    cache = {}
    _update_cache_from_list(branch, cache, _files_from_inventory(inv))
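
# Note that build_cache() ignores any existing cache: it starts from an empty
# dict and relies on _update_cache_from_list() to hash every file and write
# the result back out via write_cache().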


def update_cache(branch, inv):
    # TODO: It's supposed to be faster to stat the files in order by inum.
    # We don't directly know the inum of the files of course but we do
    # know where they were last sighted, so we can sort by that.

    cache = load_cache(branch)
    return _update_cache_from_list(branch, cache, _files_from_inventory(inv))
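
# A minimal usage sketch, assuming `branch` is a bzrlib Branch for a working
# tree (the names here are illustrative, not part of this module's API):
#
#     inv = branch.read_working_inventory()
#     cache = update_cache(branch, inv)
#     entry = cache.get(some_file_id)          # some_file_id: hypothetical id
#     if entry is not None:
#         print 'SHA-1 of %s is %s' % (entry[2], entry[1])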


def _update_cache_from_list(branch, cache, to_update):
    """Update the cache to have info on the named files.

    to_update is a sequence of (file_id, path) pairs.
    """
    hardcheck = dirty = 0
    for file_id, path in to_update:
        fap = branch.abspath(path)
        # fingerprint() stats its second argument, so the absolute path goes
        # second, not first.
        fp = fingerprint(path, fap)
        cacheentry = cache.get(file_id)

        if fp is None:              # not here
            if cacheentry:
                del cache[file_id]
                dirty += 1
            continue

        if cacheentry and (cacheentry[3:] == fp):
            continue                # all stat fields unchanged

        hardcheck += 1

        dig = sha.new(file(fap, 'rb').read()).hexdigest()

        if cacheentry is None or dig != cacheentry[1]:
            # if there was no previous entry for this file, or if the
            # SHA has changed, then update the cache
            cacheentry = (file_id, dig, path) + fp
            cache[file_id] = cacheentry
            dirty += 1

    mutter('work cache: read %d files, %d changed' % (hardcheck, dirty))

    if dirty:
        write_cache(branch, cache.itervalues())

    return cache