# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import itertools

from ..lazy_import import lazy_import
lazy_import(globals(), """
from breezy import (
    controldir,
    errors,
    lockable_files,
    lockdir,
    revision as _mod_revision,
    trace,
    )
from breezy.bzr import (
    knit as _mod_knit,
    versionedfile,
    xml5,
    xml6,
    xml7,
    )
""")
from ..repository import (
    InterRepository,
    IsInWriteGroupError,
    )
from ..bzr.repository import (
    RepositoryFormatMetaDir,
    )
from ..bzr.vf_repository import (
    InterSameDataRepository,
    MetaDirVersionedFileRepository,
    MetaDirVersionedFileRepositoryFormat,
    VersionedFileCommitBuilder,
    )


class _KnitParentsProvider(object):

    def __init__(self, knit):
        self._knit = knit

    def __repr__(self):
        return 'KnitParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = {}
        for revision_id in keys:
            if revision_id is None:
                raise ValueError('get_parent_map(None) is not valid')
            if revision_id == _mod_revision.NULL_REVISION:
                parent_map[revision_id] = ()
            else:
                try:
                    parents = tuple(
                        self._knit.get_parents_with_ghosts(revision_id))
                except errors.RevisionNotPresent:
                    continue
                else:
                    if len(parents) == 0:
                        parents = (_mod_revision.NULL_REVISION,)
                parent_map[revision_id] = parents
        return parent_map


class _KnitsParentsProvider(object):

    def __init__(self, knit, prefix=()):
        """Create a parent provider for string keys mapped to tuple keys."""
        self._knit = knit
        self._prefix = prefix

    def __repr__(self):
        return 'KnitsParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = self._knit.get_parent_map(
            [self._prefix + (key,) for key in keys])
        result = {}
        for key, parents in parent_map.items():
            revid = key[-1]
            if len(parents) == 0:
                parents = (_mod_revision.NULL_REVISION,)
            else:
                parents = tuple(parent[-1] for parent in parents)
            result[revid] = parents
        for revision_id in keys:
            if revision_id == _mod_revision.NULL_REVISION:
                result[revision_id] = ()
        return result
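

# Editor's note: an illustrative sketch, not part of the original module.  The
# revision ids below are made up; the point is only to show how these providers
# are consumed.  A repository hands one out via _make_parents_provider() and
# callers ask it for a parent map keyed by revision id:
#
#   provider = repo._make_parents_provider()      # a _KnitsParentsProvider
#   provider.get_parent_map([b'rev-2', b'rev-1'])
#   # -> {b'rev-2': (b'rev-1',), b'rev-1': (b'null:',)}
#
# Revisions missing from the knit are simply left out of the result, which is
# the contract graph.StackedParentsProvider relies on.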


class KnitRepository(MetaDirVersionedFileRepository):
    """Knit format repository."""

    # These attributes are inherited from the Repository base class. Setting
    # them to None ensures that if the constructor is changed to not initialize
    # them, or a subclass fails to call the constructor, that an error will
    # occur rather than the system working but generating incorrect data.
    _commit_builder_class = None
    _serializer = None

    def __init__(self, _format, a_controldir, control_files, _commit_builder_class,
                 _serializer):
        super(KnitRepository, self).__init__(
            _format, a_controldir, control_files)
        self._commit_builder_class = _commit_builder_class
        self._serializer = _serializer
        self._reconcile_fixes_text_parents = True

    def _all_revision_ids(self):
        """See Repository.all_revision_ids()."""
        with self.lock_read():
            return [key[0] for key in self.revisions.keys()]
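
    # Editor's note (illustrative, not from the original source): the
    # VersionedFiles API used above keys each revision by a 1-tuple, so
    # self.revisions.keys() yields entries like (b'rev-1',) and the list
    # comprehension unwraps them back into plain revision ids:
    #
    #   self.revisions.keys()     # e.g. {(b'rev-1',), (b'rev-2',)}
    #   self._all_revision_ids()  # e.g. [b'rev-1', b'rev-2']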

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self._transport
        t.copy('inventory.new.kndx', 'inventory.kndx')
        try:
            t.copy('inventory.new.knit', 'inventory.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            t.delete('inventory.knit')
        # delete the temp inventory
        t.delete('inventory.new.kndx')
        try:
            t.delete('inventory.new.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            pass
        # Force index reload (sanity check)
        self.inventories._index._reset_cache()
        self.inventories.keys()

    def _backup_inventory(self):
        t = self._transport
        t.copy('inventory.kndx', 'inventory.backup.kndx')
        t.copy('inventory.knit', 'inventory.backup.knit')

    def _move_file_id(self, from_id, to_id):
        t = self._transport.clone('knits')
        from_rel_url = self.texts._index._mapper.map((from_id, None))
        to_rel_url = self.texts._index._mapper.map((to_id, None))
        # We expect both files to always exist in this case.
        for suffix in ('.knit', '.kndx'):
            t.rename(from_rel_url + suffix, to_rel_url + suffix)

    def _remove_file_id(self, file_id):
        t = self._transport.clone('knits')
        rel_url = self.texts._index._mapper.map((file_id, None))
        for suffix in ('.kndx', '.knit'):
            try:
                t.delete(rel_url + suffix)
            except errors.NoSuchFile:
                pass

    def _temp_inventories(self):
        result = self._format._get_inventories(self._transport, self,
                                               'inventory.new')
        # Reconciling when the output has no revisions would result in no
        # writes - but we want to ensure there is an inventory for
        # compatibility with older clients that don't lazy-load.
        result.get_parent_map([(b'A',)])
        return result
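
    # Editor's note: a hedged sketch of how the three inventory helpers above
    # fit together.  The actual call sequence lives in the KnitReconciler
    # imported by reconcile() below, not in this class; roughly it does:
    #
    #   repo._backup_inventory()             # keep inventory.backup.* on disk
    #   new_inv = repo._temp_inventories()   # a knit writing inventory.new.*
    #   ... regenerate inventory texts into new_inv ...
    #   repo._activate_new_inventory()       # swap the .new files into place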

    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        with self.lock_read():
            return self.get_revision_reconcile(revision_id)

    def _refresh_data(self):
        if not self.is_locked():
            return
        if self.is_in_write_group():
            raise IsInWriteGroupError(self)
        # Create a new transaction to force all knits to see the scope change.
        # This is safe because we're outside a write group.
        self.control_files._finish_transaction()
        if self.is_write_locked():
            self.control_files._set_write_transaction()
        else:
            self.control_files._set_read_transaction()

    def reconcile(self, other=None, thorough=False):
        """Reconcile this repository."""
        from .reconcile import KnitReconciler
        with self.lock_write():
            reconciler = KnitReconciler(self, thorough=thorough)
            return reconciler.reconcile()

    def _make_parents_provider(self):
        return _KnitsParentsProvider(self.revisions)
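

# Editor's note: an illustrative sketch, not part of the original module.  The
# path below is made up, and opening normally goes through ControlDir rather
# than instantiating KnitRepository directly:
#
#   from breezy.controldir import ControlDir
#
#   repo = ControlDir.open('path/to/branch').open_repository()
#   repo.reconcile(thorough=True)   # takes its own write lock, see above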


class RepositoryFormatKnit(MetaDirVersionedFileRepositoryFormat):
    """Bzr repository knit format (generalized).

    This repository format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
    """

    # Set this attribute in derived classes to control the repository class
    # created by open and initialize.
    repository_class = None
    # Set this attribute in derived classes to control the
    # _commit_builder_class that the repository objects will have passed to
    # their constructor.
    _commit_builder_class = None
    # Set this attribute in derived classes to control the _serializer that the
    # repository objects will have passed to their constructor.

    @property
    def _serializer(self):
        return xml5.serializer_v5
    # Knit based repositories handle ghosts reasonably well.
    supports_ghosts = True
    # External lookups are not supported in this format.
    supports_external_lookups = False
    supports_chks = False
    _fetch_order = 'topological'
    _fetch_uses_deltas = True
    supports_funky_characters = True
    # The revision.kndx could potentially claim a revision has a different
    # parent to the revision text.
    revision_graph_can_have_wrong_parents = True

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        index = _mod_knit._KndxIndex(repo_transport, mapper,
                                     repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, annotated=False)

    def _get_revisions(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('revisions')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
                                     repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
                                            annotated=False)

    def _get_signatures(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('signatures')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
                                     repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
                                            annotated=False)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.HashEscapedPrefixMapper()
        base_transport = repo_transport.clone('knits')
        index = _mod_knit._KndxIndex(base_transport, mapper,
                                     repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(base_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=200,
                                            annotated=True)
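
    # Editor's note: a sketch of the wiring shared by the four _get_* helpers
    # above (illustrative only; 'some_transport' is made up).  Each store is a
    # KnitVersionedFiles built from three cooperating pieces:
    #
    #   mapper = versionedfile.ConstantMapper('revisions')         # key -> file name
    #   index = _mod_knit._KndxIndex(some_transport, mapper,
    #       repo.get_transaction, repo.is_write_locked, repo.is_locked)
    #   access = _mod_knit._KnitKeyAccess(some_transport, mapper)  # raw byte I/O
    #   store = _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
    #                                        annotated=False)
    #
    # Revisions and signatures are stored as fulltexts (max_delta_chain=0);
    # file texts allow long delta chains and keep annotations; inventories are
    # delta-compressed but unannotated.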
def initialize(self, a_controldir, shared=False):
        """Create a knit format 1 repository.

        :param a_controldir: bzrdir to contain the new repository; must already
            be initialized.
        :param shared: If true the repository will be initialized as a shared
                       repository.
        """
        trace.mutter('creating repository in %s.', a_controldir.transport.base)
        dirs = ['revision-store', 'knits']
        files = []
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(
            a_controldir, dirs, files, utf8_files, shared)
        repo_transport = a_controldir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                                     'lock', lockdir.LockDir)
        result = self.open(a_controldir=a_controldir, _found=True)
        result.lock_write()
        # the revision id here is irrelevant: it will not be stored, and cannot
        # already exist, we do this to create files on disk for older clients.
        result.inventories.get_parent_map([(b'A',)])
        result.revisions.get_parent_map([(b'A',)])
        result.signatures.get_parent_map([(b'A',)])
        result.unlock()
        self._run_post_repo_init_hooks(result, a_controldir, shared)
        return result

    def open(self, a_controldir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
                                    repository at a slightly different url
                                    than normal. I.e. during 'upgrade'.
        """
        if not _found:
            format = RepositoryFormatMetaDir.find_format(a_controldir)
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_controldir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                                     'lock', lockdir.LockDir)
        repo = self.repository_class(_format=self,
                                     a_controldir=a_controldir,
                                     control_files=control_files,
                                     _commit_builder_class=self._commit_builder_class,
                                     _serializer=self._serializer)
        repo.revisions = self._get_revisions(repo_transport, repo)
        repo.signatures = self._get_signatures(repo_transport, repo)
        repo.inventories = self._get_inventories(repo_transport, repo)
        repo.texts = self._get_texts(repo_transport, repo)
        repo.chk_bytes = None
        repo._transport = repo_transport
        return repo
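

# Editor's note: illustrative only; the path is made up.  Callers do not invoke
# RepositoryFormatKnit.open() directly.  The control directory is probed first
# and the resulting format object opens the repository, roughly:
#
#   from breezy.controldir import ControlDir
#
#   cd = ControlDir.open('path/to/branch')
#   repo = cd.open_repository()
#   # ... which internally amounts to:
#   #   format = RepositoryFormatMetaDir.find_format(cd)
#   #   repo = format.open(cd, _found=True)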


class RepositoryFormatKnit1(RepositoryFormatKnit):
    """Bzr repository format 1.

    This repository format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock

    This format was introduced in bzr 0.8.
    """

    repository_class = KnitRepository
    _commit_builder_class = VersionedFileCommitBuilder

    @property
    def _serializer(self):
        return xml5.serializer_v5

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar-NG Knit Repository Format 1"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 1"


class RepositoryFormatKnit3(RepositoryFormatKnit):
    """Bzr repository knit format 3.

    This repository format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
     - support for recording full info about the tree root
     - support for recording tree-references
    """

    repository_class = KnitRepository
    _commit_builder_class = VersionedFileCommitBuilder
    rich_root_data = True
    supports_tree_reference = True

    @property
    def _serializer(self):
        return xml7.serializer_v7

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir('dirstate-with-subtree')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar Knit Repository Format 3 (bzr 0.15)\n"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 3"
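

# Editor's note: a hedged sketch of how the format marker round-trips.  The
# byte string returned by get_format_string() is what initialize() writes to
# the repository's 'format' file, and what find_format() later reads back to
# pick the matching format class:
#
#   RepositoryFormatKnit3().get_format_string()
#   # -> b"Bazaar Knit Repository Format 3 (bzr 0.15)\n"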


class RepositoryFormatKnit4(RepositoryFormatKnit):
    """Bzr repository knit format 4.

    This repository format has everything in format 3, except for
    tree-references:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
     - support for recording full info about the tree root
    """

    repository_class = KnitRepository
    _commit_builder_class = VersionedFileCommitBuilder
    rich_root_data = True
    supports_tree_reference = False

    @property
    def _serializer(self):
        return xml6.serializer_v6

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir('rich-root')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b'Bazaar Knit Repository Format 4 (bzr 1.0)\n'

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 4"


class InterKnitRepo(InterSameDataRepository):
    """Optimised code paths between Knit based repositories."""

    @classmethod
    def _get_repo_format_to_test(self):
        return RepositoryFormatKnit1()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Knit formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        try:
            are_knits = (isinstance(source._format, RepositoryFormatKnit)
                         and isinstance(target._format, RepositoryFormatKnit))
        except AttributeError:
            return False
        return are_knits and InterRepository._same_model(source, target)

    def search_missing_revision_ids(self,
                                    find_ghosts=True, revision_ids=None, if_present_ids=None,
                                    limit=None):
        """See InterRepository.search_missing_revision_ids()."""
        with self.lock_read():
            source_ids_set = self._present_source_revisions_for(
                revision_ids, if_present_ids)
            # source_ids is the worst possible case we may need to pull.
            # now we want to filter source_ids against what we actually
            # have in target, but don't try to check for existence where we know
            # we do not have a revision as that would be pointless.
            target_ids = set(self.target.all_revision_ids())
            possibly_present_revisions = target_ids.intersection(
                source_ids_set)
            actually_present_revisions = set(
                self.target._eliminate_revisions_not_present(possibly_present_revisions))
            required_revisions = source_ids_set.difference(
                actually_present_revisions)
            if revision_ids is not None:
                # we used get_ancestry to determine source_ids then we are assured all
                # revisions referenced are present as they are installed in topological order.
                # and the tip revision was validated by get_ancestry.
                result_set = required_revisions
            else:
                # if we just grabbed the possibly available ids, then
                # we only have an estimate of whats available and need to validate
                # that against the revision records.
                result_set = set(
                    self.source._eliminate_revisions_not_present(required_revisions))
            if limit is not None:
                topo_ordered = self.source.get_graph().iter_topo_order(result_set)
                result_set = set(itertools.islice(topo_ordered, limit))
            return self.source.revision_ids_to_search_result(result_set)


InterRepository.register_optimiser(InterKnitRepo)
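

# Editor's note: an illustrative sketch (source_repo/target_repo are
# hypothetical) of what the registration above enables.  InterRepository.get()
# tries the registered optimisers, and is_compatible() above selects this class
# whenever both ends are knit-format repositories of the same model:
#
#   inter = InterRepository.get(source_repo, target_repo)
#   # isinstance(inter, InterKnitRepo) -> True for two knit repositories
#   inter.fetch(revision_id=b'some-revid')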