+    def add_fallback_repository(self, repository):
+        """Add a repository to use for looking up data not held locally.
+
+        :param repository: A repository.
+        """
+        if not self._format.supports_external_lookups:
+            raise errors.UnstackableRepositoryFormat(self._format, self.base)
+        if not self._add_fallback_repository_check(repository):
+            raise errors.IncompatibleRepositories(self, repository)
+        self._fallback_repositories.append(repository)
+        self.texts.add_fallback_versioned_files(repository.texts)
+        self.inventories.add_fallback_versioned_files(repository.inventories)
+        self.revisions.add_fallback_versioned_files(repository.revisions)
+        self.signatures.add_fallback_versioned_files(repository.signatures)
+
+    def _add_fallback_repository_check(self, repository):
+        """Check that this repository can fall back to repository safely.
+
+        :param repository: A repository to fall back to.
+        :return: True if the repositories can stack ok.
+        """
+        return InterRepository._same_model(self, repository)
+
     def add_inventory(self, revision_id, inv, parents):
         """Add the inventory inv to the repository as revision_id.
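A minimal usage sketch of the stacking entry point added above, hedged: the URLs and
repository objects are placeholders opened via the usual bzrdir helpers, and only
add_fallback_repository() plus the two error classes come from the hunk itself.

    from bzrlib import bzrdir, errors

    stacked = bzrdir.BzrDir.open('file:///srv/repo-stacked').open_repository()
    fallback = bzrdir.BzrDir.open('file:///srv/repo-full').open_repository()

    try:
        # Data missing from 'stacked' will now be looked up in 'fallback'.
        stacked.add_fallback_repository(fallback)
    except errors.UnstackableRepositoryFormat:
        print 'the stacked format does not support external lookups'
    except errors.IncompatibleRepositories:
        print 'the two repositories do not share a data model'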
     def all_revision_ids(self):
         """Returns a list of all the revision ids in the repository.

-        This is deprecated because code should generally work on the graph
-        reachable from a particular revision, and ignore any other revisions
-        that might be present. There is no direct replacement method.
+        This is conceptually deprecated because code should generally work on
+        the graph reachable from a particular revision, and ignore any other
+        revisions that might be present. There is no direct replacement
+        method.
         """
         if 'evil' in debug.debug_flags:
             mutter_callsite(2, "all_revision_ids is linear with history.")
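The reworded docstring points at the preferred pattern: operate on the graph reachable
from a revision you care about instead of on every revision present. A hedged sketch of
such a walk, using only get_parent_map() (which this diff relies on elsewhere); the
helper name and tip id are illustrative, not part of the bzrlib API.

    from bzrlib import revision as _mod_revision

    def reachable_revision_ids(repository, tip_revision_id):
        """Collect every revision id reachable from tip_revision_id."""
        pending = set([tip_revision_id])
        seen = set()
        while pending:
            # get_parent_map() only answers for revisions the repository has.
            parent_map = repository.get_parent_map(pending)
            seen.update(parent_map)
            next_pending = set()
            for parents in parent_map.itervalues():
                next_pending.update(parents)
            pending = next_pending - seen
            # The null revision marks the start of history; don't query it.
            pending.discard(_mod_revision.NULL_REVISION)
        return seen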
         # on whether escaping is required.
         self._warn_if_deprecated()
         self._write_group = None
+        # Additional places to query for data.
+        self._fallback_repositories = []

     def __repr__(self):
         return '%s(%r)' % (self.__class__.__name__,
         XXX: this docstring is duplicated in many places, e.g. lockable_files.py
         """
         result = self.control_files.lock_write(token=token)
+        for repo in self._fallback_repositories:
+            # Writes don't affect fallback repos
+            repo.lock_read()
         self._refresh_data()
         return result

     def lock_read(self):
         self.control_files.lock_read()
+        for repo in self._fallback_repositories:
+            repo.lock_read()
         self._refresh_data()

     def get_physical_lock_status(self):
                 raise errors.BzrError(
                     'Must end write groups before releasing write locks.')
         self.control_files.unlock()
+        for repo in self._fallback_repositories:
+            repo.unlock()

     def clone(self, a_bzrdir, revision_id=None):
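Taken together, the locking hunks keep the stack consistent: lock_write() and lock_read()
cascade a read lock onto every fallback repository, and unlock() releases those read locks
again. A short, hedged sketch of the resulting calling convention, reusing the placeholder
'stacked' repository from the earlier example:

    stacked.lock_write()    # also takes a read lock on each fallback repository
    try:
        pass  # writes land in the stacked repository; fallbacks stay read-only
    finally:
        stacked.unlock()    # drops the fallback read locks as well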
     'bzrlib.repofmt.pack_repo',
     'RepositoryFormatPackDevelopment0Subtree',
     )
+format_registry.register_lazy(
+    "Bazaar development format 1 (needs bzr.dev from before 1.6)\n",
+    'bzrlib.repofmt.pack_repo',
+    'RepositoryFormatPackDevelopment1',
+    )
+format_registry.register_lazy(
+    ("Bazaar development format 1 with subtree support "
+        "(needs bzr.dev from before 1.6)\n"),
+    'bzrlib.repofmt.pack_repo',
+    'RepositoryFormatPackDevelopment1Subtree',
+    )
 # 1.3->1.4 go below here
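Because the development formats are registered lazily, bzrlib.repofmt.pack_repo is only
imported once a format is actually looked up. A hedged sketch of such a lookup, assuming
format_registry.get() accepts the full format string (including the trailing newline) as
its key, which is how repository formats are keyed elsewhere in bzrlib:

    from bzrlib import repository

    # Triggers the deferred import of bzrlib.repofmt.pack_repo.
    fmt = repository.format_registry.get(
        "Bazaar development format 1 (needs bzr.dev from before 1.6)\n")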
             # to fetch from all packs to one without
             # inventory parsing etc, IFF nothing to be copied is in the target.
-            revision_ids = self.source.all_revision_ids()
+            source_revision_ids = frozenset(self.source.all_revision_ids())
+            revision_ids = source_revision_ids - \
+                frozenset(self.target.get_parent_map(source_revision_ids))
             revision_keys = [(revid,) for revid in revision_ids]
             index = self.target._pack_collection.revision_index.combined_index
             present_revision_ids = set(item[1][0] for item in
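The replacement narrows the candidate set before the pack indices are consulted:
get_parent_map() only answers for revisions the target already has, so subtracting its
keys from the source's ids leaves just the revisions still to copy. A small, hedged
illustration of that set arithmetic, with plain dicts standing in for the repositories:

    # Toy stand-ins: a "repository" here is a dict of revision id -> parents.
    source_revisions = {'rev-1': (), 'rev-2': ('rev-1',), 'rev-3': ('rev-2',)}
    target_revisions = {'rev-1': (), 'rev-2': ('rev-1',)}

    def get_parent_map(repo, revision_ids):
        # Like Repository.get_parent_map(): ids the repo lacks are simply absent.
        return dict((r, repo[r]) for r in revision_ids if r in repo)

    source_revision_ids = frozenset(source_revisions)
    revision_ids = source_revision_ids - \
        frozenset(get_parent_map(target_revisions, source_revision_ids))

    print sorted(revision_ids)    # ['rev-3']: only what the target is missing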
         if not find_ghosts and revision_id is not None:
             return self._walk_to_common_revisions([revision_id])
         elif revision_id is not None:
-            source_ids = self.source.get_ancestry(revision_id)
-            if source_ids[0] is not None:
-                raise AssertionError()
+            # Find ghosts: search for revisions pointing from one repository to
+            # the other, and vice versa, anywhere in the history of revision_id.
+            graph = self.target.get_graph(other_repository=self.source)
+            searcher = graph._make_breadth_first_searcher([revision_id])
+            found_ids = set()
+            while True:
+                try:
+                    next_revs, ghosts = searcher.next_with_ghosts()
+                except StopIteration:
+                    break
+                if revision_id in ghosts:
+                    raise errors.NoSuchRevision(self.source, revision_id)
+                found_ids.update(next_revs)
+                found_ids.update(ghosts)
+            found_ids = frozenset(found_ids)
+            # Double query here: should be able to avoid this by changing the
+            # graph api further.
+            result_set = found_ids - frozenset(
+                self.target.get_graph().get_parent_map(found_ids))
         else:
             source_ids = self.source.all_revision_ids()
-        # source_ids is the worst possible case we may need to pull.
-        # now we want to filter source_ids against what we actually
-        # have in target, but don't try to check for existence where we know
-        # we do not have a revision as that would be pointless.
-        target_ids = set(self.target.all_revision_ids())
-        result_set = set(source_ids).difference(target_ids)
+            # source_ids is the worst possible case we may need to pull.
+            # now we want to filter source_ids against what we actually
+            # have in target, but don't try to check for existence where we know
+            # we do not have a revision as that would be pointless.
+            target_ids = set(self.target.all_revision_ids())
+            result_set = set(source_ids).difference(target_ids)
         return self.source.revision_ids_to_search_result(result_set)
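For readers unfamiliar with the term, a ghost is a revision referenced as a parent
somewhere in the graph but not actually present in either repository. A hedged,
self-contained sketch of the breadth-first idea the new branch implements, again with a
dict standing in for the combined source/target parent map; the helper and ids are
illustrative only:

    def find_reachable_and_ghosts(parent_map, tip):
        """Walk ancestry from tip; report reachable ids and missing parents."""
        found, ghosts, pending = set(), set(), set([tip])
        while pending:
            next_pending = set()
            for revid in pending:
                if revid in parent_map:
                    found.add(revid)
                    next_pending.update(parent_map[revid])
                else:
                    ghosts.add(revid)    # referenced but present nowhere
            pending = next_pending - found - ghosts
        return found, ghosts

    combined = {'rev-2': ('rev-1', 'ghost-1'), 'rev-1': ()}
    found, ghosts = find_reachable_and_ghosts(combined, 'rev-2')
    print sorted(found), sorted(ghosts)    # ['rev-1', 'rev-2'] ['ghost-1']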