# Copyright (C) 2007 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
from bzrlib import (
    errors,
    revision,
    tsort,
    )
from bzrlib.deprecated_graph import (node_distances, select_farthest)
# DIAGRAM of terminology
#       A
#       /\
#      B  C
#      |  |\
#      D  E F
#      |\/| |
#      |/\|/
#      G  H
#
# In this diagram, relative to G and H:
# A, B, C, D, E are common ancestors.
# C, D and E are border ancestors, because each has a non-common descendant.
# D and E are least common ancestors because none of their descendants are
# common ancestors.
# C is not a least common ancestor because its descendant, E, is a common
# ancestor.
#
# The find_unique_lca algorithm will pick A in two steps:
# 1. find_lca('G', 'H') => ['D', 'E']
# 2. Since len(['D', 'E']) > 1, find_lca('D', 'E') => ['A']
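#
# A minimal, illustrative sketch (not part of the library): the diagram above
# can be written as a parents dict and queried through Graph with a
# DictParentsProvider.  The revision ids here are example strings, not real
# revision ids.
#
#   ancestry = {'A': [], 'B': ['A'], 'C': ['A'], 'D': ['B'], 'E': ['C'],
#               'F': ['C'], 'G': ['D', 'E'], 'H': ['D', 'E', 'F']}
#   graph = Graph(DictParentsProvider(ancestry))
#   graph.find_lca('G', 'H')         # => set(['D', 'E'])
#   graph.find_unique_lca('G', 'H')  # => 'A'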


class DictParentsProvider(object):

    def __init__(self, ancestry):
        self.ancestry = ancestry

    def __repr__(self):
        return 'DictParentsProvider(%r)' % self.ancestry

    def get_parents(self, revisions):
        return [self.ancestry.get(r, None) for r in revisions]
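

# A minimal, illustrative sketch: parents come straight from the supplied
# dict, and None is returned for any revision the dict does not know about
# (a ghost).  The revision ids are example strings.
#
#   provider = DictParentsProvider({'a': ['b'], 'b': []})
#   provider.get_parents(['a', 'ghost'])  # => [['b'], None]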


class _StackedParentsProvider(object):

    def __init__(self, parent_providers):
        self._parent_providers = parent_providers

    def __repr__(self):
        return "_StackedParentsProvider(%r)" % self._parent_providers

    def get_parents(self, revision_ids):
        """Find revision ids of the parents of a list of revisions

        A list is returned of the same length as the input.  Each entry
        is a list of parent ids for the corresponding input revision.

        [NULL_REVISION] is used as the parent of the first user-committed
        revision.  Its parent list is empty.

        If the revision is not present (i.e. a ghost), None is used in place
        of the list of parents.
        """
        found = {}
        for parents_provider in self._parent_providers:
            pending_revisions = [r for r in revision_ids if r not in found]
            parent_list = parents_provider.get_parents(pending_revisions)
            new_found = dict((k, v) for k, v in zip(pending_revisions,
                             parent_list) if v is not None)
            found.update(new_found)
            if len(found) == len(revision_ids):
                break
        return [found.get(r, None) for r in revision_ids]
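

# A minimal, illustrative sketch: providers are consulted in order, and a
# later provider is only asked about revisions that the earlier providers did
# not know.  The revision ids are example strings.
#
#   stacked = _StackedParentsProvider([DictParentsProvider({'a': ['b']}),
#                                      DictParentsProvider({'b': []})])
#   stacked.get_parents(['a', 'b', 'ghost'])  # => [['b'], [], None]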


class Graph(object):
    """Provide incremental access to revision graphs.

    This is the generic implementation; it is intended to be subclassed to
    specialize it for other repository types.
    """

    def __init__(self, parents_provider):
        """Construct a Graph that uses several graphs as its input

        This should not normally be invoked directly, because there may be
        specialized implementations for particular repository types.  See
        Repository.get_graph()

        :param parents_provider: An object providing a get_parents call
            conforming to the behavior of StackedParentsProvider.get_parents
        """
        self.get_parents = parents_provider.get_parents
        self._parents_provider = parents_provider

    def __repr__(self):
        return 'Graph(%r)' % self._parents_provider

    def find_lca(self, *revisions):
        """Determine the lowest common ancestors of the provided revisions

        A lowest common ancestor is a common ancestor none of whose
        descendants are common ancestors.  In graphs, unlike trees, there may
        be multiple lowest common ancestors.

        This algorithm has two phases.  Phase 1 identifies border ancestors,
        and phase 2 filters border ancestors to determine lowest common
        ancestors.

        In phase 1, border ancestors are identified, using a breadth-first
        search starting at the bottom of the graph.  Searches are stopped
        whenever a node or one of its descendants is determined to be common.

        In phase 2, the border ancestors are filtered to find the least
        common ancestors.  This is done by searching the ancestries of each
        border ancestor.

        Phase 2 is performed on the principle that a border ancestor that is
        not an ancestor of any other border ancestor is a least common
        ancestor.

        Searches are stopped when they find a node that is determined to be a
        common ancestor of all border ancestors, because this shows that it
        cannot be a descendant of any border ancestor.

        The scaling of this operation should be proportional to
        1. The number of uncommon ancestors
        2. The number of border ancestors
        3. The length of the shortest path between a border ancestor and an
           ancestor of all border ancestors.
        """
        border_common, common, sides = self._find_border_ancestors(revisions)
        # We may have common ancestors that can be reached from each other.
        # - ask for the heads of them to filter it down to only ones that
        # cannot be reached from each other - phase 2.
        return self.heads(border_common)

    def find_difference(self, left_revision, right_revision):
        """Determine the graph difference between two revisions"""
        border, common, (left, right) = self._find_border_ancestors(
            [left_revision, right_revision])
        return (left.difference(right).difference(common),
                right.difference(left).difference(common))

    def _make_breadth_first_searcher(self, revisions):
        return _BreadthFirstSearcher(revisions, self)

    def _find_border_ancestors(self, revisions):
        """Find common ancestors with at least one uncommon descendant.

        Border ancestors are identified using a breadth-first
        search starting at the bottom of the graph.  Searches are stopped
        whenever a node or one of its descendants is determined to be common.

        This will scale with the number of uncommon ancestors.

        As well as the border ancestors, a set of seen common ancestors and a
        list of sets of seen ancestors for each input revision is returned.
        This allows calculation of graph difference from the results of this
        operation.
        """
        if None in revisions:
            raise errors.InvalidRevisionId(None, self)
        common_searcher = self._make_breadth_first_searcher([])
        common_ancestors = set()
        searchers = [self._make_breadth_first_searcher([r])
                     for r in revisions]
        active_searchers = searchers[:]
        border_ancestors = set()

        def update_common(searcher, revision):
            w_seen_ancestors = searcher.find_seen_ancestors(
                revision)
            stopped = searcher.stop_searching_any(w_seen_ancestors)
            common_ancestors.update(w_seen_ancestors)
            common_searcher.start_searching(stopped)

        while True:
            if len(active_searchers) == 0:
                return border_ancestors, common_ancestors, [s.seen for s in
                                                            searchers]
            try:
                new_common = common_searcher.next()
                common_ancestors.update(new_common)
            except StopIteration:
                pass
            else:
                for searcher in active_searchers:
                    for revision in new_common.intersection(searcher.seen):
                        update_common(searcher, revision)

            newly_seen = set()
            new_active_searchers = []
            for searcher in active_searchers:
                try:
                    newly_seen.update(searcher.next())
                except StopIteration:
                    pass
                else:
                    new_active_searchers.append(searcher)
            active_searchers = new_active_searchers
            for revision in newly_seen:
                if revision in common_ancestors:
                    # Already known common: stop every searcher at it.
                    for searcher in searchers:
                        update_common(searcher, revision)
                    continue
                for searcher in searchers:
                    if revision not in searcher.seen:
                        break
                else:
                    # Seen by every searcher, so it is a border ancestor.
                    border_ancestors.add(revision)
                    for searcher in searchers:
                        update_common(searcher, revision)

    def heads(self, keys):
        """Return the heads from amongst keys.

        This is done by searching the ancestries of each key.  Any key that is
        reachable from another key is not returned; all the others are.

        This operation scales with the relative depth between any two keys. If
        any two keys are completely disconnected all ancestry of both sides
        will be retrieved.

        :param keys: An iterable of keys.
        :return: A set of the heads. Note that as a set there is no ordering
            information. Callers will need to filter their input to create
            order if they need it.
        """
        candidate_heads = set(keys)
        if revision.NULL_REVISION in candidate_heads:
            # NULL_REVISION is only a head if it is the only entry
            candidate_heads.remove(revision.NULL_REVISION)
            if not candidate_heads:
                return set([revision.NULL_REVISION])
        if len(candidate_heads) < 2:
            return candidate_heads
        searchers = dict((c, self._make_breadth_first_searcher([c]))
                         for c in candidate_heads)
        active_searchers = dict(searchers)
        # skip over the actual candidate for each searcher
        for searcher in active_searchers.itervalues():
            searcher.next()
        # The common walker finds nodes that are common to two or more of the
        # input keys, so that we don't access all history when a currently
        # uncommon search point actually meets up with something behind a
        # common search point. Common search points do not keep searches
        # active; they just allow us to make searches inactive without
        # accessing all history.
        common_walker = self._make_breadth_first_searcher([])
        while len(active_searchers) > 0:
            ancestors = set()
            # advance the common walker before the per-candidate searchers
            try:
                common_walker.next()
            except StopIteration:
                # No common points being searched at this time.
                pass
            for candidate in active_searchers.keys():
                try:
                    searcher = active_searchers[candidate]
                except KeyError:
                    # rare case: we deleted candidate in a previous iteration
                    # through this for loop, because it was determined to be
                    # a descendant of another candidate.
                    continue
                try:
                    ancestors.update(searcher.next())
                except StopIteration:
                    del active_searchers[candidate]
                    continue
            # process found nodes
            new_common = set()
            for ancestor in ancestors:
                if ancestor in candidate_heads:
                    candidate_heads.remove(ancestor)
                    del searchers[ancestor]
                    if ancestor in active_searchers:
                        del active_searchers[ancestor]
                # it may meet up with a known common node
                if ancestor in common_walker.seen:
                    # some searcher has encountered our known common nodes:
                    # just stop it
                    ancestor_set = set([ancestor])
                    for searcher in searchers.itervalues():
                        searcher.stop_searching_any(ancestor_set)
                else:
                    # or it may have been just reached by all the searchers:
                    for searcher in searchers.itervalues():
                        if ancestor not in searcher.seen:
                            break
                    else:
                        # The final active searcher has just reached this node,
                        # making it be known as a descendant of all candidates,
                        # so we can stop searching it, and any seen ancestors
                        new_common.add(ancestor)
                        for searcher in searchers.itervalues():
                            seen_ancestors = searcher.find_seen_ancestors(
                                ancestor)
                            searcher.stop_searching_any(seen_ancestors)
            common_walker.start_searching(new_common)
        return candidate_heads
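
    # Illustrative sketch, using the example ancestry dict sketched under the
    # DIAGRAM at the top of this module: a key that is an ancestor of another
    # key is dropped, while mutually unreachable keys are all returned.
    #
    #   graph.heads(['G', 'D'])  # => set(['G']), since D is an ancestor of G
    #   graph.heads(['D', 'E'])  # => set(['D', 'E'])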

    def find_unique_lca(self, left_revision, right_revision,
                        count_steps=False):
        """Find a unique LCA.

        Find lowest common ancestors.  If there is no unique common
        ancestor, find the lowest common ancestors of those ancestors.

        Iteration stops when a unique lowest common ancestor is found.
        The graph origin is necessarily a unique lowest common ancestor.

        Note that None is not an acceptable substitute for NULL_REVISION
        in the input for this method.

        :param count_steps: If True, the return value will be a tuple of
            (unique_lca, steps) where steps is the number of times that
            find_lca was run.  If False, only unique_lca is returned.
        """
        revisions = [left_revision, right_revision]
        steps = 0
        while True:
            steps += 1
            lca = self.find_lca(*revisions)
            if len(lca) == 1:
                result = lca.pop()
                if count_steps:
                    return result, steps
                return result
            if len(lca) == 0:
                raise errors.NoCommonAncestor(left_revision, right_revision)
            revisions = lca

    def iter_topo_order(self, revisions):
        """Iterate through the input revisions in topological order.

        This sorting only ensures that parents come before their children.
        An ancestor may sort after a descendant if the relationship is not
        visible in the supplied list of revisions.
        """
        sorter = tsort.TopoSorter(zip(revisions, self.get_parents(revisions)))
        return sorter.iter_topo_order()
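
    # Illustrative sketch, using the example ancestry dict sketched under the
    # DIAGRAM at the top of this module.  Parents that are not in the supplied
    # list ('A', 'E') place no constraint on the order:
    #
    #   list(graph.iter_topo_order(['G', 'D', 'B']))  # => ['B', 'D', 'G']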

    def is_ancestor(self, candidate_ancestor, candidate_descendant):
        """Determine whether a revision is an ancestor of another.

        We answer this using heads() as heads() has the logic to perform the
        smallest number of parent lookups to determine the ancestral
        relationship between N revisions.
        """
        return set([candidate_descendant]) == self.heads(
            [candidate_ancestor, candidate_descendant])
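
    # Illustrative sketch, using the example ancestry dict sketched under the
    # DIAGRAM at the top of this module:
    #
    #   graph.is_ancestor('B', 'G')  # => True, heads(['B', 'G']) is set(['G'])
    #   graph.is_ancestor('G', 'B')  # => False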


class HeadsCache(object):
    """A cache of results for graph heads calls."""

    def __init__(self, graph):
        self.graph = graph
        self._heads = {}

    def heads(self, keys):
        """Return the heads of keys.

        This matches the API of Graph.heads(), specifically the return value
        is a set which can be mutated, and ordering of the input is not
        preserved in the output.

        :see also: Graph.heads.
        :param keys: The keys to calculate heads for.
        :return: A set containing the heads, which may be mutated without
            affecting future lookups.
        """
        keys = frozenset(keys)
        try:
            return set(self._heads[keys])
        except KeyError:
            heads = self.graph.heads(keys)
            self._heads[keys] = heads
            return set(heads)
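
    # Illustrative sketch: the keys are normalised to a frozenset, so the
    # cached answer is reused regardless of the order the keys are given in.
    #
    #   cache = HeadsCache(graph)
    #   cache.heads(['G', 'D'])  # computed via graph.heads() => set(['G'])
    #   cache.heads(['D', 'G'])  # answered from the cache    => set(['G'])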


class _BreadthFirstSearcher(object):
    """Parallel breadth-first search of the ancestry of revisions.

    This class implements the iterator protocol, but additionally
    1. provides a set of seen ancestors, and
    2. allows some ancestries to be unsearched, via stop_searching_any
    """

    def __init__(self, revisions, parents_provider):
        self._start = set(revisions)
        self._search_revisions = None
        self.seen = set(revisions)
        self._parents_provider = parents_provider

    def __repr__(self):
        return ('_BreadthFirstSearcher(self._search_revisions=%r,'
                ' self.seen=%r)' % (self._search_revisions, self.seen))

    def next(self):
        """Return the next ancestors of this revision.

        Ancestors are returned in the order they are seen in a breadth-first
        traversal.  No ancestor will be returned more than once.
        """
        if self._search_revisions is None:
            self._search_revisions = self._start
        else:
            new_search_revisions = set()
            for parents in self._parents_provider.get_parents(
                self._search_revisions):
                if parents is None:
                    # Ghost revision: no parent information available.
                    continue
                new_search_revisions.update(p for p in parents if
                                            p not in self.seen)
            self._search_revisions = new_search_revisions
        if len(self._search_revisions) == 0:
            raise StopIteration()
        self.seen.update(self._search_revisions)
        return self._search_revisions

    def __iter__(self):
        return self

    def find_seen_ancestors(self, revision):
        """Find ancestors of this revision that have already been seen."""
        searcher = _BreadthFirstSearcher([revision], self._parents_provider)
        seen_ancestors = set()
        for ancestors in searcher:
            for ancestor in ancestors:
                if ancestor not in self.seen:
                    searcher.stop_searching_any([ancestor])
                else:
                    seen_ancestors.add(ancestor)
        return seen_ancestors

    def stop_searching_any(self, revisions):
        """
        Remove any of the specified revisions from the search list.

        None of the specified revisions are required to be present in the
        search list.  In this case, the call is a no-op.
        """
        stopped = self._search_revisions.intersection(revisions)
        self._search_revisions = self._search_revisions.difference(revisions)
        return stopped

    def start_searching(self, revisions):
        if self._search_revisions is None:
            self._start = set(revisions)
        else:
            self._search_revisions.update(revisions.difference(self.seen))
        self.seen.update(revisions)
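

# A minimal, illustrative sketch, using the example ancestry dict sketched
# under the DIAGRAM at the top of this module: each call to next() returns
# the next generation of ancestors, and stop_searching_any() prunes a line of
# descent from the search.
#
#   searcher = _BreadthFirstSearcher(['G'], DictParentsProvider(ancestry))
#   searcher.next()                     # => set(['G'])
#   searcher.next()                     # => set(['D', 'E'])
#   searcher.stop_searching_any(['E'])  # => set(['E'])
#   searcher.next()                     # => set(['B']), E's line is dropped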