    def __repr__(self):
        return 'KnitParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = {}
        for revision_id in keys:
            if revision_id is None:
                raise ValueError('get_parent_map(None) is not valid')
            if revision_id == _mod_revision.NULL_REVISION:
                parent_map[revision_id] = ()
            else:
                try:
                    parents = tuple(
                        self._knit.get_parents_with_ghosts(revision_id))
                except errors.RevisionNotPresent:
                    continue
                else:
                    if len(parents) == 0:
                        parents = (_mod_revision.NULL_REVISION,)
                parent_map[revision_id] = parents
        return parent_map
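
    # Illustrative sketch (hypothetical revision ids): for a knit whose index
    # records 'rev-2' with parent 'rev-1', and 'rev-1' with no parents,
    # get_parent_map(['rev-2', 'rev-1', 'rev-missing']) returns
    # {'rev-2': ('rev-1',), 'rev-1': ('null:',)}; absent revisions are
    # silently omitted, and parentless revisions are reported with
    # NULL_REVISION as their sole parent.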


class _KnitsParentsProvider(object):

    def __init__(self, knit, prefix=()):
        """Create a parent provider for string keys mapped to tuple keys."""
        self._knit = knit
        self._prefix = prefix

    def __repr__(self):
        return 'KnitsParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = self._knit.get_parent_map(
            [self._prefix + (key,) for key in keys])
        result = {}
        for key, parents in parent_map.items():
            revid = key[-1]
            if len(parents) == 0:
                parents = (_mod_revision.NULL_REVISION,)
            else:
                parents = tuple(parent[-1] for parent in parents)
            result[revid] = parents
        for revision_id in keys:
            if revision_id == _mod_revision.NULL_REVISION:
                result[revision_id] = ()
        return result
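
    # Illustrative sketch (hypothetical keys): with prefix ('file-id',), a
    # query for ['rev-A'] is looked up as the tuple key ('file-id', 'rev-A'),
    # and the tuple-keyed parents in the answer are flattened back to plain
    # revision id strings, e.g. {'rev-A': ('rev-parent',)}.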


class KnitRepository(MetaDirRepository):
    """Knit format repository."""

    # These attributes are inherited from the Repository base class. Setting
    # them to None ensures that if the constructor is changed to not initialize
    # them, or a subclass fails to call the constructor, that an error will
    # occur rather than the system working but generating incorrect data.
    _commit_builder_class = None
    _serializer = None

    def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
        _serializer):
        MetaDirRepository.__init__(self, _format, a_bzrdir, control_files)
        self._commit_builder_class = _commit_builder_class
        self._serializer = _serializer
        self._reconcile_fixes_text_parents = True

    def _warn_if_deprecated(self):
        # This class isn't deprecated
        pass

    def _inventory_add_lines(self, inv_vf, revid, parents, lines, check_content):
        return inv_vf.add_lines_with_ghosts(revid, parents, lines,
            check_content=check_content)[0]

    def _all_revision_ids(self):
        """See Repository.all_revision_ids()."""
        # Knits get the revision graph from the index of the revision knit, so
        # it's always possible even if they're on an unlistable transport.
        return [key[0] for key in self.revisions.keys()]

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self._transport
        t.copy('inventory.new.kndx', 'inventory.kndx')
        try:
            t.copy('inventory.new.knit', 'inventory.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            t.delete('inventory.knit')
        # delete the temp inventory
        t.delete('inventory.new.kndx')
        try:
            t.delete('inventory.new.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            pass
        # Force index reload (sanity check)
        self.inventories._index._reset_cache()
        self.inventories.keys()
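
    # Net effect, assuming reconcile has written an inventory.new pair: the
    # repaired inventory.new.kndx/.knit replace inventory.kndx/.knit in place,
    # the temporary files are removed, and the in-memory index cache is
    # cleared so the next access rereads the new index from disk.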

    def _backup_inventory(self):
        t = self._transport
        t.copy('inventory.kndx', 'inventory.backup.kndx')
        t.copy('inventory.knit', 'inventory.backup.knit')

    def _move_file_id(self, from_id, to_id):
        t = self._transport.clone('knits')
        from_rel_url = self.texts._index._mapper.map((from_id, None))
        to_rel_url = self.texts._index._mapper.map((to_id, None))
        # We expect both files to always exist in this case.
        for suffix in ('.knit', '.kndx'):
            t.rename(from_rel_url + suffix, to_rel_url + suffix)

    def _remove_file_id(self, file_id):
        t = self._transport.clone('knits')
        rel_url = self.texts._index._mapper.map((file_id, None))
        for suffix in ('.kndx', '.knit'):
            try:
                t.delete(rel_url + suffix)
            except errors.NoSuchFile:
                pass

    def _temp_inventories(self):
        result = self._format._get_inventories(self._transport, self,
            'inventory.new')
        # Reconciling when the output has no revisions would result in no
        # writes - but we want to ensure there is an inventory for
        # compatibility with older clients that don't lazy-load.
        result.get_parent_map([('A',)])
        return result

    @needs_read_lock
    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        revision_id = osutils.safe_revision_id(revision_id)
        return self.get_revision_reconcile(revision_id)

    def _refresh_data(self):
        if not self.is_locked():
            return
        # Create a new transaction to force all knits to see the scope change.
        # This is safe because we're outside a write group.
        self.control_files._finish_transaction()
        if self.is_write_locked():
            self.control_files._set_write_transaction()
        else:
            self.control_files._set_read_transaction()

    def get_ancestry(self, revision_id, topo_sorted=True):
        """Return a list of revision-ids integrated by a revision.

        This is topologically sorted, unless 'topo_sorted' is specified as
        False.
        """
        if _mod_revision.is_null(revision_id):
            return [None]
        revision_id = osutils.safe_revision_id(revision_id)
        vf = self._get_revision_vf()
        try:
            return [None] + vf.get_ancestry(revision_id, topo_sorted)
        except errors.RevisionNotPresent:
            raise errors.NoSuchRevision(self, revision_id)

    def get_revision_graph(self, revision_id=None):
        """Return a dictionary containing the revision graph.

        :param revision_id: The revision_id to get a graph from. If None, then
        the entire revision graph is returned. This is a deprecated mode of
        operation and will be removed in the future.
        :return: a dictionary of revision_id->revision_parents_list.
        """
        if 'evil' in debug.debug_flags:
            mutter_callsite(3,
                "get_revision_graph scales with size of history.")
        # special case NULL_REVISION
        if revision_id == _mod_revision.NULL_REVISION:
            return {}
        revision_id = osutils.safe_revision_id(revision_id)
        a_weave = self._get_revision_vf()
        if revision_id is None:
            return a_weave.get_graph()
        if revision_id not in a_weave:
            raise errors.NoSuchRevision(self, revision_id)
        else:
            # add what can be reached from revision_id
            return a_weave.get_graph([revision_id])
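
    # Illustrative sketch (hypothetical ids): for a linear history
    # null: -> 'r1' -> 'r2', get_revision_graph('r2') returns
    # {'r1': [], 'r2': ['r1']}; NULL_REVISION itself never appears in the
    # result.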

    def get_revision_graph_with_ghosts(self, revision_ids=None):
        """Return a graph of the revisions with ghosts marked as applicable.

        :param revision_ids: an iterable of revisions to graph or None for all.
        :return: a Graph object with the graph reachable from revision_ids.
        """
        if 'evil' in debug.debug_flags:
            mutter_callsite(3,
                "get_revision_graph_with_ghosts scales with size of history.")
        result = deprecated_graph.Graph()
        vf = self._get_revision_vf()
        versions = set(vf.versions())
        if not revision_ids:
            pending = set(self.all_revision_ids())
            required = set([])
        else:
            pending = set(osutils.safe_revision_id(r) for r in revision_ids)
            # special case NULL_REVISION
            if _mod_revision.NULL_REVISION in pending:
                pending.remove(_mod_revision.NULL_REVISION)
            required = set(pending)
        done = set([])
        while len(pending):
            revision_id = pending.pop()
            if not revision_id in versions:
                if revision_id in required:
                    raise errors.NoSuchRevision(self, revision_id)
                # a ghost
                result.add_ghost(revision_id)
                # mark it as done so we don't try for it again.
                done.add(revision_id)
                continue
            parent_ids = vf.get_parents_with_ghosts(revision_id)
            for parent_id in parent_ids:
                # is this queued or done ?
                if (parent_id not in pending and
                    parent_id not in done):
                    # no, queue it.
                    pending.add(parent_id)
            result.add_node(revision_id, parent_ids)
            done.add(revision_id)
        return result
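
    # Illustrative sketch: if 'r2' names a parent 'r1' that is missing from
    # the revision knit, 'r1' is added with result.add_ghost('r1') instead of
    # raising, unless 'r1' was explicitly requested in revision_ids (making it
    # 'required'), in which case NoSuchRevision is raised.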

    def _get_revision_vf(self):
        """:return: a versioned file containing the revisions."""
        vf = self._revision_store.get_revision_file(self.get_transaction())
        return vf

    def _get_history_vf(self):
        """Get a versionedfile whose history graph reflects all revisions.

        For knit repositories, this is the revision knit.
        """
        return self._get_revision_vf()

    @needs_write_lock
    def reconcile(self, other=None, thorough=False):
        from bzrlib.reconcile import KnitReconciler
        reconciler = KnitReconciler(self, thorough=thorough)
        reconciler.reconcile()
        return reconciler

    def revision_parents(self, revision_id):
        revision_id = osutils.safe_revision_id(revision_id)
        return self._get_revision_vf().get_parents(revision_id)

    def _make_parents_provider(self):
        return _KnitsParentsProvider(self.revisions)

    def _find_inconsistent_revision_parents(self, revisions_iterator=None):
        """Find revisions with different parent lists in the revision object
        and in the index graph.

        :param revisions_iterator: None, or an iterator of (revid,
            Revision-or-None). This iterator controls the revisions checked.
        :returns: an iterator yielding tuples of (revision-id, parents-in-index,
            parents-in-revision).
        """
        if not self.is_locked():
            raise AssertionError()
        vf = self.revisions
        if revisions_iterator is None:
            revisions_iterator = self._iter_revisions(None)
        for revid, revision in revisions_iterator:
            parent_map = vf.get_parent_map([(revid,)])
            parents_according_to_index = tuple(parent[-1] for parent in
                parent_map[(revid,)])
            parents_according_to_revision = tuple(revision.parent_ids)
            if parents_according_to_index != parents_according_to_revision:
                yield (revid, parents_according_to_index,
                    parents_according_to_revision)

    def _check_for_inconsistent_revision_parents(self):
        inconsistencies = list(self._find_inconsistent_revision_parents())
        if inconsistencies:
            raise errors.BzrCheckError(
                "Revision knit has inconsistent parents.")

    def revision_graph_can_have_wrong_parents(self):
        # The revision.kndx could potentially claim a revision has a different
        # parent to the revision text.
        return True
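
    # Illustrative sketch (hypothetical ids) for the consistency check above:
    # if revision.kndx records 'r2' with parents ('r1', 'r0') while the
    # serialised revision text names only ('r1',), the iterator yields
    # ('r2', ('r1', 'r0'), ('r1',)) and _check_for_inconsistent_revision_parents
    # raises BzrCheckError.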


class RepositoryFormatKnit(MetaDirRepositoryFormat):
    """Bzr repository knit format (generalized).

    This repository format has:
     - knits for file texts and inventory
    """

    _commit_builder_class = None
    # Set this attribute in derived classes to control the _serializer that the
    # repository objects will have passed to their constructor.
    @property
    def _serializer(self):
        return xml5.serializer_v5
    # Knit based repositories handle ghosts reasonably well.
    supports_ghosts = True
    # External lookups are not supported in this format.
    supports_external_lookups = False
    supports_chks = False

    _fetch_order = 'topological'
    _fetch_uses_deltas = True

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, annotated=False)

    def _get_revisions(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('revisions')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_signatures(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('signatures')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.HashEscapedPrefixMapper()
        base_transport = repo_transport.clone('knits')
        index = _mod_knit._KndxIndex(base_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(base_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=200,
            annotated=True)
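
    # Layout note: the ConstantMapper-backed knits above (inventory, revisions,
    # signatures) each live as a single .kndx/.knit pair at the repository
    # root, while file texts are spread below 'knits/' using hash-escaped
    # per-file-id prefixes (roughly 'knits/<hash>/<escaped-file-id>.knit';
    # the exact path shape here is an assumption of this note).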

    def _get_control_store(self, repo_transport, control_files):
        """Return the control store for this repository."""
        return VersionedFileStore(
            repo_transport,
            prefixed=False,
            file_mode=control_files._file_mode,
            versionedfile_class=knit.KnitVersionedFile,
            versionedfile_kwargs={'factory': knit.KnitPlainFactory()},
            )

    def _get_revision_store(self, repo_transport, control_files):
        """See RepositoryFormat._get_revision_store()."""
        versioned_file_store = VersionedFileStore(
            repo_transport,
            file_mode=control_files._file_mode,
            prefixed=False,
            precious=True,
            versionedfile_class=knit.KnitVersionedFile,
            versionedfile_kwargs={'delta': False,
                                  'factory': knit.KnitPlainFactory(),
                                  },
            escaped=True,
            )
        return KnitRevisionStore(versioned_file_store)

    def _get_text_store(self, transport, control_files):
        """See RepositoryFormat._get_text_store()."""
        return self._get_versioned_file_store('knits',
                                  transport,
                                  control_files,
                                  versionedfile_class=knit.KnitVersionedFile,
                                  versionedfile_kwargs={
                                      'create_parent_dir': True,
                                      'dir_mode': control_files._dir_mode,
                                  },
                                  escaped=True)

    def initialize(self, a_bzrdir, shared=False):
        """Create a knit format 1 repository.

        :param shared: If true the repository will be initialized as a shared
                       repository.
        """
        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        dirs = ['knits']
        files = []
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
        repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        result = self.open(a_bzrdir=a_bzrdir, _found=True)
        result.lock_write()
        # the revision id here is irrelevant: it will not be stored, and cannot
        # already exist, we do this to create files on disk for older clients.
        result.inventories.get_parent_map([('A',)])
        result.revisions.get_parent_map([('A',)])
        result.signatures.get_parent_map([('A',)])
        result.unlock()
        self._run_post_repo_init_hooks(result, a_bzrdir, shared)
        return result

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
                                    repository at a slightly different url
                                    than normal. I.e. during 'upgrade'.
        """
        if not _found:
            format = RepositoryFormat.find_format(a_bzrdir)
            assert format.__class__ == self.__class__
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        repo = self.repository_class(_format=self,
                              a_bzrdir=a_bzrdir,
                              control_files=control_files,
                              _commit_builder_class=self._commit_builder_class,
                              _serializer=self._serializer)
        repo.revisions = self._get_revisions(repo_transport, repo)
        repo.signatures = self._get_signatures(repo_transport, repo)
        repo.inventories = self._get_inventories(repo_transport, repo)
        repo.texts = self._get_texts(repo_transport, repo)
        repo.chk_bytes = None
        repo._transport = repo_transport
        return repo
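
    # Usage sketch (assumes an existing knit-format branch on disk): formats
    # are normally reached through the bzrdir plumbing rather than directly,
    # e.g.
    #   a_bzrdir = bzrdir.BzrDir.open('path/to/branch')
    #   repo = a_bzrdir.open_repository()
    # which ends up calling this open() with the located repository format.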


class RepositoryFormatKnit1(RepositoryFormatKnit):
    """Bzr repository knit format 1."""


class RepositoryFormatKnit3(RepositoryFormatKnit):
    """Bzr repository knit format 3."""

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 3"


class RepositoryFormatKnit4(RepositoryFormatKnit):
    """Bzr repository knit format 4.

    This repository format has everything in format 3, except for
    tree-references, which are not supported.

    This format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - support for recording full info about the tree root
    """

    repository_class = KnitRepository
    _commit_builder_class = RootCommitBuilder
    rich_root_data = True
    supports_tree_reference = False
    @property
    def _serializer(self):
        return xml6.serializer_v6

    def _get_matching_bzrdir(self):
        return bzrdir.format_registry.make_bzrdir('rich-root')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return 'Bazaar Knit Repository Format 4 (bzr 1.0)\n'

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 4"


def _get_stream_as_bytes(knit, required_versions):
    """Generate a serialised data stream.

    The format is a bencoding of a list. The first element of the list is a
    string of the format signature, then each subsequent element is a list
    corresponding to a record. Those lists contain:

      * a version id
      * a list of options
      * a list of parents
      * the bytes

    :returns: a bencoded list.
    """
    knit_stream = knit.get_data_stream(required_versions)
    format_signature, data_list, callable = knit_stream
    data = []
    data.append(format_signature)
    for version, options, length, parents in data_list:
        data.append([version, options, parents, callable(length)])
    return bencode.bencode(data)
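
# Illustrative sketch (hypothetical data): for a single record the bencoded
# list looks roughly like
#   [format_signature, ['rev-1', ['fulltext'], ['parent-rev'], '<raw knit bytes>']]
# i.e. one header string followed by one [version, options, parents, bytes]
# entry per requested version, serialised with bencode.bencode().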