24
import os, tempfile, types, osutils, gzip, errno
25
32
from stat import ST_SIZE
26
33
from StringIO import StringIO
27
from trace import mutter
35
from bzrlib.errors import BzrError, UnlistableStore
36
from bzrlib.trace import mutter
38
import bzrlib.osutils as osutils
40
#from bzrlib.remotebranch import get_url
29
44
######################################################################
55
70
>>> st.add(StringIO('goodbye'), '123123')
56
71
>>> st['123123'].read()
76
"""Return (count, bytes)
78
This is the (compressed) size stored on disk, not the size of
84
total += self._item_size(fid)
88
class ImmutableStore(Store):
89
"""Store that stores files on disk.
59
91
TODO: Atomic add by writing to a temporary file and renaming.
92
TODO: Guard against the same thing being stored twice, compressed and
93
uncompressed during copy_multi_immutable - the window is for a
94
matching store with some crack code that lets it offer a
95
non gz FOO and then a fz FOO.
61
97
In bzr 0.0.5 and earlier, files within the store were marked
62
98
readonly on disk. This is no longer done but existing stores need
66
102
def __init__(self, basedir):
    """Create a store whose files live under the directory basedir."""
    super(ImmutableStore, self).__init__()
    self._basedir = basedir
70
if '\\' in id or '/' in id:
71
raise ValueError("invalid store id %r" % id)
72
return os.path.join(self._basedir, id)
106
def _path(self, entry_id):
    """Return the on-disk path for entry_id inside this store.

    Raises TypeError for non-string ids and ValueError for ids that
    contain path separators (which could escape the store directory).
    """
    if not isinstance(entry_id, basestring):
        raise TypeError(type(entry_id))
    if '/' in entry_id or '\\' in entry_id:
        raise ValueError("invalid store id %r" % entry_id)
    return os.path.join(self._basedir, entry_id)
74
113
def __repr__(self):
    """Show the concrete store class and the directory it is rooted at."""
    return "%s(%r)" % (type(self).__name__, self._basedir)
113
def copy_multi(self, other, ids):
151
def copy_multi(self, other, ids, permit_failure=False):
114
152
"""Copy texts for ids from other into self.
116
If an id is present in self, it is skipped. A count of copied
117
ids is returned, which may be less than len(ids).
154
If an id is present in self, it is skipped.
156
Returns (count_copied, failed), where failed is a collection of ids
157
that could not be copied.
119
from bzrlib.progress import ProgressBar
159
pb = bzrlib.ui.ui_factory.progress_bar()
121
161
pb.update('preparing to copy')
122
162
to_copy = [id for id in ids if id not in self]
123
163
if isinstance(other, ImmutableStore):
124
return self.copy_multi_immutable(other, to_copy, pb)
164
return self.copy_multi_immutable(other, to_copy, pb,
165
permit_failure=permit_failure)
126
168
for id in to_copy:
128
170
pb.update('copy', count, len(to_copy))
129
self.add(other[id], id)
130
assert count == len(to_copy)
171
if not permit_failure:
172
self.add(other[id], id)
181
if not permit_failure:
182
assert count == len(to_copy)
135
def copy_multi_immutable(self, other, to_copy, pb):
136
from shutil import copyfile
186
def copy_multi_immutable(self, other, to_copy, pb, permit_failure=False):
138
189
for id in to_copy:
139
190
p = self._path(id)
140
191
other_p = other._path(id)
193
osutils.link_or_copy(other_p, p)
194
except (IOError, OSError), e:
144
195
if e.errno == errno.ENOENT:
145
copyfile(other_p+".gz", p+".gz")
196
if not permit_failure:
197
osutils.link_or_copy(other_p+".gz", p+".gz")
200
osutils.link_or_copy(other_p+".gz", p+".gz")
202
if e.errno == errno.ENOENT:
159
218
return (os.access(p, os.R_OK)
160
219
or os.access(p + '.gz', os.R_OK))
162
# TODO: Guard against the same thing being stored twice, compressed and uncompresse
221
def _item_size(self, fid):
224
return os.stat(p)[ST_SIZE]
226
return os.stat(p + '.gz')[ST_SIZE]
228
# TODO: Guard against the same thing being stored twice,
229
# compressed and uncompressed
164
231
def __iter__(self):
165
232
for f in os.listdir(self._basedir):
229
275
os.rmdir(self._basedir)
230
276
mutter("%r destroyed" % self)
279
class ImmutableMemoryStore(Store):
280
"""A memory only store."""
283
super(ImmutableMemoryStore, self).__init__()
286
def add(self, stream, fileid, compressed=True):
287
if self._contents.has_key(fileid):
288
raise StoreError("fileid %s already in the store" % fileid)
289
self._contents[fileid] = stream.read()
291
def __getitem__(self, fileid):
292
"""Returns a file reading from a particular entry."""
293
if not self._contents.has_key(fileid):
295
return StringIO(self._contents[fileid])
297
def _item_size(self, fileid):
    """Return the size in bytes of the content stored under fileid."""
    content = self._contents[fileid]
    return len(content)
301
return iter(self._contents.keys())
304
class RemoteStore(object):
    def __init__(self, baseurl):
        """Remember the base URL that entries will be fetched from."""
        self._baseurl = baseurl
309
def _path(self, name):
311
raise ValueError('invalid store id', name)
312
return self._baseurl + '/' + name
314
def __getitem__(self, fileid):
316
from bzrlib.remotebranch import get_url
317
p = self._path(fileid)
319
return get_url(p, compressed=True)
320
except urllib2.URLError:
323
return get_url(p, compressed=False)
324
except urllib2.URLError:
325
raise KeyError(fileid)
327
def __contains__(self, fileid):
336
"""A store that caches data locally, to avoid repeated downloads.
337
The precacache method should be used to avoid server round-trips for
341
def __init__(self, store, cache_dir):
    """Wrap store, caching fetched entries in an ImmutableStore at cache_dir."""
    self.cache_store = ImmutableStore(cache_dir)
    self.source_store = store
345
def __getitem__(self, id):
    """Fetch id, filling the local cache from the source store on a miss."""
    mutter("Cache add %s" % id)
    cache = self.cache_store
    if id not in cache:
        cache.add(self.source_store[id], id)
    return cache[id]
351
def prefetch(self, ids):
352
"""Copy a series of ids into the cache, before they are used.
353
For remote stores that support pipelining or async downloads, this can
354
increase speed considerably.
355
Failures while prefetching are ignored.
357
mutter("Prefetch of ids %s" % ",".join(ids))
358
self.cache_store.copy_multi(self.source_store, ids,
362
def copy_all(store_from, store_to):
    """Copy every entry id present in store_from into store_to.

    Raises UnlistableStore if store_from cannot be iterated.
    """
    if not hasattr(store_from, "__iter__"):
        raise UnlistableStore(store_from)
    store_to.copy_multi(store_from, list(store_from))