43
51
class Store(object):
    """This class represents the abstract storage layout for saving information.

    An abstract store that holds files indexed by unique names.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.

    >>> st = ImmutableScratchStore()
    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'
    """

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def get(self, fileid, suffix=None):
        """Returns a file reading from a particular entry.

        If suffix is present, retrieve the named suffix for fileid.
        """
        raise NotImplementedError

    def __getitem__(self, fileid):
        """DEPRECATED. Please use .get(fileid) instead."""
        raise NotImplementedError

    def __iter__(self):
        # NOTE(review): def header was lost in the mangled source; the bare
        # `raise NotImplementedError` and listable()'s getattr check imply it.
        raise NotImplementedError

    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid"""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def has_id(self, fileid, suffix=None):
        """Return True or false for the presence of fileid in the store.

        suffix, if present, is a per file suffix, i.e. for digital signature
        data.
        """
        raise NotImplementedError

    def listable(self):
        """Return True if this store is able to be listed."""
        return (getattr(self, "__iter__", None) is not None)

    def copy_all_ids(self, store_from, pb=None):
        """Copy all the file ids from store_from into self."""
        if not store_from.listable():
            raise UnlistableStore(store_from)
        ids = []
        for count, file_id in enumerate(store_from):
            if pb:
                pb.update('listing files', count, count)
            ids.append(file_id)
        if pb:
            pb.clear()
        mutter('copy_all ids: %r', ids)
        self.copy_multi(store_from, ids, pb=pb)

    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressTask object, if none is given, the default will be created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        if pb:
            pb.update('preparing to copy')
        failed = set()
        count = 0
        for fileid in ids:
            count += 1
            if self.has_id(fileid):
                continue
            try:
                self._copy_one(fileid, None, other, pb)
                # also copy any registered per-file suffixes (e.g. signatures);
                # a missing suffix entry is not an error.
                for suffix in self._suffixes:
                    try:
                        self._copy_one(fileid, suffix, other, pb)
                    except KeyError:
                        pass
                if pb:
                    pb.update('copy', count, len(ids))
            except KeyError:
                if permit_failure:
                    failed.add(fileid)
                else:
                    raise
        if pb:
            pb.clear()
        return count, failed

    def _copy_one(self, fileid, suffix, other, pb):
        """Most generic copy-one object routine.

        Subclasses can override this to provide an optimised
        copy between their own instances. Such overriden routines
        should call this if they have no optimised facility for a
        specific 'other'.
        """
        mutter('Store._copy_one: %r', fileid)
        f = other.get(fileid, suffix)
        self.add(f, fileid, suffix)
150
class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    def add(self, f, fileid, suffix=None):
        """Add contents of a file into the store.

        :param f: A file-like object
        :param fileid: The unique id for the entry
        :param suffix: An optional registered per-file suffix
        """
        mutter("add store entry %r", fileid)
        names = self._id_to_names(fileid, suffix)
        # Entries are immutable: refuse to overwrite an existing id.
        if self._transport.has_any(names):
            raise BzrError("store %r already contains id %r"
                           % (self._transport.base, fileid))

        # Most of the time, just adding the file will work
        # if we find a time where it fails, (because the dir
        # doesn't exist), then create the dir, and try again
        self._add(names[0], f)

    def _add(self, relpath, f):
        """Actually add the file to the given location.

        This should be overridden by children.
        """
        raise NotImplementedError('children need to implement this function.')

    def _check_fileid(self, fileid):
        # Ids must be plain bytestrings with no path separators, since they
        # become transport-relative file names.
        if type(fileid) != str:
            raise TypeError('Fileids should be bytestrings: %s %r' % (
                type(fileid), fileid))
        if '\\' in fileid or '/' in fileid:
            raise ValueError("invalid store id %r" % fileid)

    def _id_to_names(self, fileid, suffix):
        """Return the names in the expected order"""
        if suffix is not None:
            fn = self._relpath(fileid, [suffix])
        else:
            fn = self._relpath(fileid)

        # FIXME RBC 20051128 this belongs in TextStore.
        fn_gz = fn + '.gz'
        if self._compressed:
            return fn_gz, fn
        else:
            return fn, fn_gz

    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        return self._transport.has_any(self._id_to_names(fileid, suffix))

    def _get_name(self, fileid, suffix=None):
        """A special check, which returns the name of an existing file.

        This is similar in spirit to 'has_id', but it is designed
        to return information about which file the store has.
        """
        for name in self._id_to_names(fileid, suffix=suffix):
            if self._transport.has(name):
                return name
        return None

    def _get(self, filename):
        """Return an vanilla file stream for clients to read from.

        This is the body of a template method on 'get', and should be
        implemented by subclasses.
        """
        raise NotImplementedError

    def get(self, fileid, suffix=None):
        """See Store.get()."""
        names = self._id_to_names(fileid, suffix)
        for name in names:
            try:
                return self._get(name)
            except errors.NoSuchFile:
                pass
        raise KeyError(fileid)

    def __init__(self, a_transport, prefixed=False, compressed=False,
                 dir_mode=None, file_mode=None, escaped=False):
        super(TransportStore, self).__init__()
        self._transport = a_transport
        self._prefixed = prefixed
        # FIXME RBC 20051128 this belongs in TextStore.
        self._compressed = compressed
        self._suffixes = set()
        self._escaped = escaped
        # It is okay for these to be None, it just means they
        # will just use the filesystem defaults
        self._dir_mode = dir_mode
        self._file_mode = file_mode
        # Create a key mapper to use
        if escaped and prefixed:
            self._mapper = versionedfile.HashEscapedPrefixMapper()
        elif not escaped and prefixed:
            self._mapper = versionedfile.HashPrefixMapper()
        elif escaped and not prefixed:
            raise ValueError(
                "%r: escaped unprefixed stores are not permitted."
                % (self,))
        else:
            self._mapper = versionedfile.PrefixMapper()

    def _iter_files_recursive(self):
        """Iterate through the files in the transport."""
        # NOTE(review): the loop body was lost in the mangled source;
        # presumably it just yields each relpath — confirm against history.
        for quoted_relpath in self._transport.iter_files_recursive():
            yield quoted_relpath

    def __iter__(self):
        for relpath in self._iter_files_recursive():
            # worst case is one of each suffix.
            name = os.path.basename(relpath)
            if name.endswith('.gz'):
                name = name[:-3]
            skip = False
            for count in range(len(self._suffixes)):
                for suffix in self._suffixes:
                    if name.endswith('.' + suffix):
                        skip = True
            if not skip:
                yield self._mapper.unmap(name)[0]

    def __len__(self):
        return len(list(self.__iter__()))

    def _relpath(self, fileid, suffixes=None):
        self._check_fileid(fileid)
        if suffixes:
            for suffix in suffixes:
                if not suffix in self._suffixes:
                    raise ValueError("Unregistered suffix %r" % suffix)
                self._check_fileid(suffix)
        else:
            suffixes = []
        path = self._mapper.map((fileid,))
        full_path = '.'.join([path] + suffixes)
        return full_path

    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)

    def listable(self):
        """Return True if this store is able to be listed."""
        return self._transport.listable()

    def register_suffix(self, suffix):
        """Register a suffix as being expected in this store."""
        self._check_fileid(suffix)
        if suffix == 'gz':
            raise ValueError('You cannot register the "gz" suffix.')
        self._suffixes.add(suffix)

    def total_size(self):
        """Return (count, bytes)

        This is the (compressed) size stored on disk, not the size of
        the content."""
        total = 0
        count = 0
        for relpath in self._transport.iter_files_recursive():
            count += 1
            total += self._transport.stat(relpath).st_size

        return count, total
88
class ImmutableStore(Store):
89
"""Store that stores files on disk.
91
TODO: Atomic add by writing to a temporary file and renaming.
92
TODO: Guard against the same thing being stored twice, compressed and
93
uncompressed during copy_multi_immutable - the window is for a
94
matching store with some crack code that lets it offer a
95
non gz FOO and then a fz FOO.
97
In bzr 0.0.5 and earlier, files within the store were marked
98
readonly on disk. This is no longer done but existing stores need
102
def __init__(self, basedir):
103
super(ImmutableStore, self).__init__()
104
self._basedir = basedir
106
def _path(self, entry_id):
107
if not isinstance(entry_id, basestring):
108
raise TypeError(type(entry_id))
109
if '\\' in entry_id or '/' in entry_id:
110
raise ValueError("invalid store id %r" % entry_id)
111
return os.path.join(self._basedir, entry_id)
114
return "%s(%r)" % (self.__class__.__name__, self._basedir)
116
def add(self, f, fileid, compressed=True):
117
"""Add contents of a file into the store.
119
f -- An open file, or file-like object."""
120
# FIXME: Only works on files that will fit in memory
122
from bzrlib.atomicfile import AtomicFile
124
mutter("add store entry %r" % (fileid))
125
if isinstance(f, types.StringTypes):
130
p = self._path(fileid)
131
if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
132
raise BzrError("store %r already contains id %r" % (self._basedir, fileid))
138
af = AtomicFile(fn, 'wb')
141
gf = gzip.GzipFile(mode='wb', fileobj=af)
151
def copy_multi(self, other, ids, permit_failure=False):
152
"""Copy texts for ids from other into self.
154
If an id is present in self, it is skipped.
156
Returns (count_copied, failed), where failed is a collection of ids
157
that could not be copied.
159
pb = bzrlib.ui.ui_factory.progress_bar()
161
pb.update('preparing to copy')
162
to_copy = [id for id in ids if id not in self]
163
if isinstance(other, ImmutableStore):
164
return self.copy_multi_immutable(other, to_copy, pb,
165
permit_failure=permit_failure)
170
pb.update('copy', count, len(to_copy))
171
if not permit_failure:
172
self.add(other[id], id)
181
if not permit_failure:
182
assert count == len(to_copy)
186
def copy_multi_immutable(self, other, to_copy, pb, permit_failure=False):
191
other_p = other._path(id)
193
osutils.link_or_copy(other_p, p)
194
except (IOError, OSError), e:
195
if e.errno == errno.ENOENT:
196
if not permit_failure:
197
osutils.link_or_copy(other_p+".gz", p+".gz")
200
osutils.link_or_copy(other_p+".gz", p+".gz")
202
if e.errno == errno.ENOENT:
210
pb.update('copy', count, len(to_copy))
211
assert count == len(to_copy)
215
def __contains__(self, fileid):
217
p = self._path(fileid)
218
return (os.access(p, os.R_OK)
219
or os.access(p + '.gz', os.R_OK))
221
def _item_size(self, fid):
224
return os.stat(p)[ST_SIZE]
226
return os.stat(p + '.gz')[ST_SIZE]
228
# TODO: Guard against the same thing being stored twice,
229
# compressed and uncompressed
232
for f in os.listdir(self._basedir):
234
# TODO: case-insensitive?
240
return len(os.listdir(self._basedir))
242
def __getitem__(self, fileid):
243
"""Returns a file reading from a particular entry."""
244
p = self._path(fileid)
246
return gzip.GzipFile(p + '.gz', 'rb')
248
if e.errno != errno.ENOENT:
254
if e.errno != errno.ENOENT:
257
raise KeyError(fileid)
260
class ImmutableScratchStore(ImmutableStore):
261
"""Self-destructing test subclass of ImmutableStore.
263
The Store only exists for the lifetime of the Python object.
264
Obviously you should not put anything precious in it.
267
super(ImmutableScratchStore, self).__init__(tempfile.mkdtemp())
270
for f in os.listdir(self._basedir):
271
fpath = os.path.join(self._basedir, f)
272
# needed on windows, and maybe some other filesystems
273
os.chmod(fpath, 0600)
275
os.rmdir(self._basedir)
276
mutter("%r destroyed" % self)
279
class ImmutableMemoryStore(Store):
    """A memory only store."""

    def __init__(self):
        super(ImmutableMemoryStore, self).__init__()
        # maps fileid -> raw content bytes
        self._contents = {}

    def add(self, stream, fileid, compressed=True):
        # 'compressed' is accepted for interface compatibility but content
        # is always held uncompressed in memory.
        if self._contents.has_key(fileid):
            raise StoreError("fileid %s already in the store" % fileid)
        self._contents[fileid] = stream.read()

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        if not self._contents.has_key(fileid):
            # NOTE(review): the raise line was lost in the mangled source;
            # KeyError matches the mapping protocol and the other stores.
            raise KeyError(fileid)
        return StringIO(self._contents[fileid])

    def _item_size(self, fileid):
        return len(self._contents[fileid])

    def __iter__(self):
        return iter(self._contents.keys())
304
class RemoteStore(object):
    """A read-only store whose entries are fetched from URLs."""

    def __init__(self, baseurl):
        self._baseurl = baseurl

    def _path(self, name):
        if '/' in name:
            raise ValueError('invalid store id', name)
        return self._baseurl + '/' + name

    def __getitem__(self, fileid):
        from bzrlib.remotebranch import get_url
        p = self._path(fileid)
        # Try the compressed form first, then fall back to uncompressed.
        try:
            return get_url(p, compressed=True)
        except urllib2.URLError:
            pass
        try:
            return get_url(p, compressed=False)
        except urllib2.URLError:
            raise KeyError(fileid)

    def __contains__(self, fileid):
        # NOTE(review): body was lost in the mangled source; membership via
        # attempted lookup is the natural implementation — confirm.
        try:
            self[fileid]
            return True
        except KeyError:
            return False
# NOTE(review): the class header line was lost in the mangled source;
# reconstructed from the docstring and method bodies — confirm base class.
class CachedStore(object):
    """A store that caches data locally, to avoid repeated downloads.

    The prefetch method should be used to avoid server round-trips for
    important content.
    """

    def __init__(self, store, cache_dir):
        self.source_store = store
        self.cache_store = ImmutableStore(cache_dir)

    def __getitem__(self, id):
        mutter("Cache add %s" % id)
        # Pull into the local cache on first access, then serve from it.
        if id not in self.cache_store:
            self.cache_store.add(self.source_store[id], id)
        return self.cache_store[id]

    def prefetch(self, ids):
        """Copy a series of ids into the cache, before they are used.

        For remote stores that support pipelining or async downloads, this can
        increase speed considerably.

        Failures while prefetching are ignored.
        """
        mutter("Prefetch of ids %s" % ",".join(ids))
        self.cache_store.copy_multi(self.source_store, ids,
                                    permit_failure=True)
362
def copy_all(store_from, store_to):
    """Copy all ids from one store to another.

    :param store_from: A listable (iterable) source store.
    :param store_to: The destination store.
    :raises UnlistableStore: if store_from cannot be iterated.
    """
    if not hasattr(store_from, "__iter__"):
        raise UnlistableStore(store_from)
    ids = [f for f in store_from]
    store_to.copy_multi(store_from, ids)