58
58
def __repr__(self):
59
59
return 'DictParentsProvider(%r)' % self.ancestry
61
# Note: DictParentsProvider does not implement get_cached_parent_map
# Arguably, the data is clearly cached in memory. However, this class
# is mostly used for testing, and it keeps the tests clean to not
# change it.
66
61
def get_parent_map(self, keys):
    """See StackedParentsProvider.get_parent_map"""
    # Only keys present in the ancestry dict are returned; unknown keys
    # are silently omitted (callers treat absence as "ghost").
    ancestry = self.ancestry
    return {k: ancestry[k] for k in keys if k in ancestry}
66
@deprecated_function(deprecated_in((1, 16, 0)))
def _StackedParentsProvider(*args, **kwargs):
    """Deprecated alias kept for backward compatibility.

    Forwards all arguments to :class:`StackedParentsProvider`.
    """
    return StackedParentsProvider(*args, **kwargs)
72
70
class StackedParentsProvider(object):
    """A parents provider which stacks (or unions) multiple providers.

    The providers are queried in the order of the provided parent_providers.
    """

    def __init__(self, parent_providers):
        """:param parent_providers: ordered list of providers to query."""
        self._parent_providers = parent_providers
308
257
right = searchers[1].seen
309
258
return (left.difference(right), right.difference(left))
311
def find_descendants(self, old_key, new_key):
    """Find descendants of old_key that are ancestors of new_key."""
    # Build a child map restricted to revisions that might be descendants,
    # then walk it forward from old_key.
    child_map = self.get_child_map(self._find_descendant_ancestors(
        old_key, new_key))
    graph = Graph(DictParentsProvider(child_map))
    searcher = graph._make_breadth_first_searcher([old_key])
    # NOTE(review): trailing lines were truncated in the source and have been
    # reconstructed — exhaust the searcher, then return everything it saw.
    list(searcher)
    return searcher.seen
320
def _find_descendant_ancestors(self, old_key, new_key):
    """Find ancestors of new_key that may be descendants of old_key."""
    stop = self._make_breadth_first_searcher([old_key])
    descendants = self._make_breadth_first_searcher([new_key])
    for revisions in descendants:
        # Stop exploring any path once it reaches something old_key's
        # own ancestry has already seen.
        old_stop = stop.seen.intersection(revisions)
        descendants.stop_searching_any(old_stop)
        # Advance the stop searcher in lockstep and prune anything the
        # descendants searcher has already covered.
        seen_stop = descendants.find_seen_ancestors(stop.step())
        descendants.stop_searching_any(seen_stop)
    return descendants.seen.difference(stop.seen)
331
def get_child_map(self, keys):
    """Get a mapping from parents to children of the specified keys.

    This is simply the inversion of get_parent_map. Only supplied keys
    will be discovered as children.

    :return: a dict of key:child_list for keys.
    """
    parent_map = self._parents_provider.get_parent_map(keys)
    parent_child = {}
    # Sort for deterministic child-list ordering.
    for child, parents in sorted(parent_map.items()):
        for parent in parents:
            parent_child.setdefault(parent, []).append(child)
    return parent_child
345
260
def find_distance_to_null(self, target_revision_id, known_revision_ids):
346
261
"""Find the left-hand distance to the NULL_REVISION.
601
515
for searcher in unique_tip_searchers:
602
516
common_to_all_unique_nodes.intersection_update(searcher.seen)
603
517
common_to_all_unique_nodes.intersection_update(
604
all_unique_searcher.seen)
518
all_unique_searcher.seen)
605
519
# Step all-unique less frequently than the other searchers.
606
520
# In the common case, we don't need to spider out far here, so
607
521
# avoid doing extra work.
608
522
if step_all_unique:
609
tstart = osutils.perf_counter()
523
tstart = time.clock()
610
524
nodes = all_unique_searcher.step()
611
525
common_to_all_unique_nodes.update(nodes)
612
526
if 'graph' in debug.debug_flags:
613
tdelta = osutils.perf_counter() - tstart
527
tdelta = time.clock() - tstart
614
528
trace.mutter('all_unique_searcher step() took %.3fs'
615
529
'for %d nodes (%d total), iteration: %s',
616
530
tdelta, len(nodes), len(all_unique_searcher.seen),
1242
1116
# This is the same as the following loop. I don't know that it is any
1244
# simple_ancestors.difference_update(r for r, p_ids in parent_map.iteritems()
1245
# if p_ids is not None and revisions.intersection(p_ids))
1246
# return simple_ancestors
1118
## simple_ancestors.difference_update(r for r, p_ids in parent_map.iteritems()
1119
## if p_ids is not None and revisions.intersection(p_ids))
1120
## return simple_ancestors
1248
1122
# Yet Another Way, invert the parent map (which can be cached)
1249
1123
## descendants = {}
1250
# for revision_id, parent_ids in parent_map.iteritems():
1251
# for p_id in parent_ids:
1124
## for revision_id, parent_ids in parent_map.iteritems():
1125
## for p_id in parent_ids:
1252
1126
## descendants.setdefault(p_id, []).append(revision_id)
1253
# for revision in revisions.intersection(descendants):
1254
# simple_ancestors.difference_update(descendants[revision])
1255
# return simple_ancestors
1256
for revision, parent_ids in parent_map.items():
1127
## for revision in revisions.intersection(descendants):
1128
## simple_ancestors.difference_update(descendants[revision])
1129
## return simple_ancestors
1130
for revision, parent_ids in parent_map.iteritems():
1257
1131
if parent_ids is None:
1259
1133
for parent_id in parent_ids:
1589
1462
return revs, ghosts
1592
def invert_parent_map(parent_map):
    """Given a map from child => parents, create a map of parent=>children"""
    child_map = {}
    for child, parents in parent_map.items():
        for p in parents:
            # Any given parent is likely to have only a small handful
            # of children, many will have only one. So we avoid mem overhead of
            # a list, in exchange for extra copying of tuples
            if p not in child_map:
                child_map[p] = (child,)
            else:
                child_map[p] = child_map[p] + (child,)
    return child_map
1465
class SearchResult(object):
    """The result of a breadth first search.

    A SearchResult provides the ability to reconstruct the search or access a
    set of the keys the search found.
    """

    def __init__(self, start_keys, exclude_keys, key_count, keys):
        """Create a SearchResult.

        :param start_keys: The keys the search started at.
        :param exclude_keys: The keys the search excludes.
        :param key_count: The total number of keys (from start to but not
            including exclude).
        :param keys: The keys the search found. Note that in future we may get
            a SearchResult from a smart server, in which case the keys list is
            not necessarily immediately available.
        """
        self._recipe = ('search', start_keys, exclude_keys, key_count)
        self._keys = frozenset(keys)

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date
        without knowing all the found keys. The essential elements are a list
        of keys to start and to stop at. In order to give reproducible
        results when ghosts are encountered by a search they are automatically
        added to the exclude list (or else ghost filling may alter the
        results).

        :return: A tuple ('search', start_keys_set, exclude_keys_set,
            revision_count). To recreate the results of this search, create a
            breadth first searcher on the same graph starting at start_keys.
            Then call next() (or next_with_ghosts()) repeatedly, and on every
            result, call stop_searching_any on any keys from the exclude_keys
            set. The revision_count value acts as a trivial cross-check - the
            found revisions of the new search should have as many elements as
            revision_count. If it does not, then additional revisions have been
            ghosted since the search was executed the first time and the second
            time.
        """
        return self._recipe

    def get_keys(self):
        """Return the keys found in this search.

        :return: A set of keys.
        """
        return self._keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        return self._recipe[3] == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        start = self._recipe[1]
        exclude = self._recipe[2]
        count = self._recipe[3]
        keys = self.get_keys()
        # New heads = referenced + old heads - seen things - exclude
        pending_refs = set(referenced)
        pending_refs.update(start)
        pending_refs.difference_update(seen)
        pending_refs.difference_update(exclude)
        # New exclude = old exclude + satisfied heads
        seen_heads = start.intersection(seen)
        exclude.update(seen_heads)
        # keys gets seen removed
        keys = keys - seen
        # length is reduced by len(seen)
        count -= len(seen)
        return SearchResult(pending_refs, exclude, count, keys)
1546
class PendingAncestryResult(object):
    """A search result that will reconstruct the ancestry for some graph heads.

    Unlike SearchResult, this doesn't hold the complete search result in
    memory, it just holds a description of how to generate it.
    """

    def __init__(self, heads, repo):
        """Constructor.

        :param heads: an iterable of graph heads.
        :param repo: a repository to use to generate the ancestry for the given
            heads.
        """
        self.heads = frozenset(heads)
        self.repo = repo

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date.

        :seealso SearchResult.get_recipe:

        :return: A tuple ('proxy-search', start_keys_set, set(), -1)
            To recreate this result, create a PendingAncestryResult with the
            start_keys_set.
        """
        return ('proxy-search', self.heads, set(), -1)

    def get_keys(self):
        """See SearchResult.get_keys.

        Returns all the keys for the ancestry of the heads, excluding
        NULL_REVISION.
        """
        return self._get_keys(self.repo.get_graph())

    def _get_keys(self, graph):
        # Hoist the constant; ghost entries (parents is None) and the null
        # revision are filtered out.
        NULL_REVISION = revision.NULL_REVISION
        keys = [key for (key, parents) in graph.iter_ancestry(self.heads)
                if key != NULL_REVISION and parents is not None]
        return keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        if revision.NULL_REVISION in self.heads:
            return len(self.heads) == 1
        else:
            return len(self.heads) == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        referenced = self.heads.union(referenced)
        return PendingAncestryResult(referenced - seen, self.repo)
1607
1608
def collapse_linear_regions(parent_map):
1679
class GraphThunkIdsToKeys(object):
    """Forwards calls about 'ids' to be about keys internally."""

    def __init__(self, graph):
        # The wrapped graph speaks in 1-tuples; this thunk unwraps/wraps.
        self._graph = graph

    def topo_sort(self):
        return [r for (r,) in self._graph.topo_sort()]

    def heads(self, ids):
        """See Graph.heads()"""
        as_keys = [(i,) for i in ids]
        head_keys = self._graph.heads(as_keys)
        return {h[0] for h in head_keys}

    def merge_sort(self, tip_revision):
        nodes = self._graph.merge_sort((tip_revision,))
        # Rewrite each node's tuple key back to a plain revision id.
        for node in nodes:
            node.key = node.key[0]
        return nodes

    def add_node(self, revision, parents):
        self._graph.add_node((revision,), [(p,) for p in parents])
1704
# Counters used for debugging/instrumentation of graph operations.
# NOTE(review): the source showed two interleaved variants (bzrlib absolute
# imports vs. relative imports); the relative-import variant is kept to match
# the osutils.perf_counter usage elsewhere in this file — confirm.
_counters = [0, 0, 0, 0, 0, 0, 0]
try:
    # Prefer the compiled (pyx) implementation when available.
    from ._known_graph_pyx import KnownGraph
except ImportError as e:
    osutils.failed_to_load_extension(e)
    from ._known_graph_py import KnownGraph  # noqa: F401