# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from bzrlib import (

-from bzrlib.deprecated_graph import (node_distances, select_farthest)
+from bzrlib.symbol_versioning import deprecated_function, deprecated_in

STEP_UNIQUE_SEARCHER_EVERY = 5

        return 'DictParentsProvider(%r)' % self.ancestry

    def get_parent_map(self, keys):
-        """See _StackedParentsProvider.get_parent_map"""
+        """See StackedParentsProvider.get_parent_map"""
        ancestry = self.ancestry
        return dict((k, ancestry[k]) for k in keys if k in ancestry)

-class _StackedParentsProvider(object):
+@deprecated_function(deprecated_in((1, 16, 0)))
+def _StackedParentsProvider(*args, **kwargs):
+    return StackedParentsProvider(*args, **kwargs)
+
+
+class StackedParentsProvider(object):
+    """A parents provider which stacks (or unions) multiple providers.
+
+    The providers are queried in the order of the provided parent_providers.
+    """

    def __init__(self, parent_providers):
        self._parent_providers = parent_providers

    def __repr__(self):
-        return "_StackedParentsProvider(%r)" % self._parent_providers
+        return "%s(%r)" % (self.__class__.__name__, self._parent_providers)

    def get_parent_map(self, keys):
        """Get a mapping of keys => parents

    The cache is enabled by default, but may be disabled and re-enabled.
    """
-    def __init__(self, parent_provider=None, get_parent_map=None, debug=False):
+    def __init__(self, parent_provider=None, get_parent_map=None):
        """Constructor.

        :param parent_provider: The ParentProvider to use. It or
            get_parent_map must be supplied.
        :param get_parent_map: The get_parent_map callback to use. It or
            parent_provider must be supplied.
-        :param debug: If true, mutter debugging messages.
        """
        self._real_provider = parent_provider
        if get_parent_map is None:
            self._get_parent_map = self._real_provider.get_parent_map
        else:
            self._get_parent_map = get_parent_map
-        self._cache_misses = True
-        self._requested_parents = None
+        self.enable_cache(True)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._real_provider)

    def enable_cache(self, cache_misses=True):
        """Enable cache."""
+        if self._cache is not None:
+            raise AssertionError('Cache enabled when already enabled.')
        self._cache_misses = cache_misses
-        self._requested_parents = set()
+        self.missing_keys = set()

    def disable_cache(self):
        """Disable and clear the cache."""
        self._cache = None
-        self._requested_parents = None
+        self._cache_misses = None
+        self.missing_keys = set()

    def get_cached_map(self):
        """Return any cached get_parent_map values."""
        if self._cache is None:
            return None
-        return dict((k, v) for k, v in self._cache.items()
+        return dict(self._cache)

    def get_parent_map(self, keys):
-        """See _StackedParentsProvider.get_parent_map."""
-        # Hack to build up the caching logic.
-        ancestry = self._cache
-            # Caching is disabled.
-            missing_revisions = set(keys)
-            missing_revisions = set(key for key in keys if key not in ancestry)
-        if missing_revisions:
-            parent_map = self._get_parent_map(missing_revisions)
-                mutter('re-retrieved revisions: %d of %d',
-                       len(set(ancestry).intersection(parent_map)),
-            ancestry.update(parent_map)
-            if self._cache_misses:
-                # None is never a valid parents list, so it can be used to
-                ancestry.update(dict((k, None) for k in missing_revisions
-                                     if k not in parent_map))
-        present_keys = [k for k in keys if ancestry.get(k) is not None]
-        if self._requested_parents is not None and len(ancestry) != 0:
-            self._requested_parents.update(present_keys)
-            mutter('Current hit rate: %d%%',
-                   100.0 * len(self._requested_parents) / len(ancestry))
-        return dict((k, ancestry[k]) for k in present_keys)
+        """See StackedParentsProvider.get_parent_map."""
+        cache = self._cache
+        if cache is None:
+            cache = self._get_parent_map(keys)
+        else:
+            needed_revisions = set(key for key in keys if key not in cache)
+            # Do not ask for negatively cached keys
+            needed_revisions.difference_update(self.missing_keys)
+            if needed_revisions:
+                parent_map = self._get_parent_map(needed_revisions)
+                cache.update(parent_map)
+                if self._cache_misses:
+                    for key in needed_revisions:
+                        if key not in parent_map:
+                            self.note_missing_key(key)
+        result = {}
+        for key in keys:
+            value = cache.get(key)
+            if value is not None:
+                result[key] = value
+        return result
+
+    def note_missing_key(self, key):
+        """Note that key is a missing key."""
+        if self._cache_misses:
+            self.missing_keys.add(key)
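
A brief usage sketch (not part of the diff; the revision ids are invented):
CachingParentsProvider wraps any parents provider, memoises get_parent_map()
answers, and remembers misses in missing_keys so ghosts are not asked for a
second time.

    from bzrlib.graph import CachingParentsProvider, DictParentsProvider

    caching = CachingParentsProvider(
        parent_provider=DictParentsProvider({'rev-2': ('rev-1',)}))
    caching.get_parent_map(['rev-2', 'ghost'])  # -> {'rev-2': ('rev-1',)}
    caching.missing_keys                        # -> set(['ghost'])
    caching.get_cached_map()                    # -> {'rev-2': ('rev-1',)}
    caching.disable_cache()                     # drop the cache and the recorded misses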

class Graph(object):

        return known_revnos[cur_tip] + num_steps

+    def find_lefthand_distances(self, keys):
+        """Find the distance to null for all the keys in keys.
+
+        :param keys: keys to lookup.
+        :return: A dict key->distance for all of keys.
+        """
+        # Optimisable by concurrent searching, but a random spread should get
+        # some sort of hit rate.
+        known_revnos = []
+        ghosts = []
+        for key in keys:
+            try:
+                known_revnos.append(
+                    (key, self.find_distance_to_null(key, known_revnos)))
+            except errors.GhostRevisionsHaveNoRevno:
+                ghosts.append(key)
+        for key in ghosts:
+            known_revnos.append((key, -1))
+        return dict(known_revnos)
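
An illustrative call (not part of the diff; 'repo' is assumed to be an open
Repository and the revision ids are invented, with rev-3 being the third
mainline commit). The distance is the number of left-hand steps down to the
null revision, and ghosts are reported as -1, as in the code above:

    graph = repo.get_graph()
    graph.find_lefthand_distances(['rev-3', 'ghost'])
    # -> {'rev-3': 3, 'ghost': -1}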

    def find_unique_ancestors(self, unique_revision, common_revisions):
        """Find the unique ancestors for a revision versus others.

                         all_unique_searcher._iterations)
            unique_tip_searchers = next_unique_searchers

-    @symbol_versioning.deprecated_method(symbol_versioning.one_one)
-    def get_parents(self, revisions):
-        """Find revision ids of the parents of a list of revisions
-
-        A list is returned of the same length as the input. Each entry
-        is a list of parent ids for the corresponding input revision.
-
-        [NULL_REVISION] is used as the parent of the first user-committed
-        revision. Its parent list is empty.
-
-        If the revision is not present (i.e. a ghost), None is used in place
-        of the list of parents.
-
-        Deprecated in bzr 1.2 - please see get_parent_map.
-        """
-        parents = self.get_parent_map(revisions)
-        return [parents.get(r, None) for r in revisions]

    def get_parent_map(self, revisions):
        """Get a map of key:parent_list for revisions.

        return set([candidate_descendant]) == self.heads(
            [candidate_ancestor, candidate_descendant])

+    def is_between(self, revid, lower_bound_revid, upper_bound_revid):
+        """Determine whether a revision is between two others.
+
+        returns true if and only if:
+        lower_bound_revid <= revid <= upper_bound_revid
+        """
+        return ((upper_bound_revid is None or
+                    self.is_ancestor(revid, upper_bound_revid)) and
+               (lower_bound_revid is None or
+                    self.is_ancestor(lower_bound_revid, revid)))
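
A quick illustration of the predicate (not part of the diff; 'graph' is
assumed to be a Graph over the invented linear history base -> mid -> tip):

    graph.is_between('mid', 'base', 'tip')   # True:  base <= mid <= tip
    graph.is_between('base', 'mid', 'tip')   # False: mid is not an ancestor of base
    graph.is_between('mid', None, 'tip')     # True:  a None bound is unbounded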

    def _search_for_extra_common(self, common, searchers):
        """Make sure that unique nodes are genuinely unique.

            a SearchResult from a smart server, in which case the keys list is
            not necessarily immediately available.
        """
-        self._recipe = (start_keys, exclude_keys, key_count)
+        self._recipe = ('search', start_keys, exclude_keys, key_count)
        self._keys = frozenset(keys)

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date
        without knowing all the found keys. The essential elements are a list
-        of keys to start and and to stop at. In order to give reproducible
+        of keys to start and to stop at. In order to give reproducible
        results when ghosts are encountered by a search they are automatically
        added to the exclude list (or else ghost filling may alter the

-        :return: A tuple (start_keys_set, exclude_keys_set, revision_count). To
-            recreate the results of this search, create a breadth first
-            searcher on the same graph starting at start_keys. Then call next()
-            (or next_with_ghosts()) repeatedly, and on every result, call
-            stop_searching_any on any keys from the exclude_keys set. The
-            revision_count value acts as a trivial cross-check - the found
-            revisions of the new search should have as many elements as
+        :return: A tuple ('search', start_keys_set, exclude_keys_set,
+            revision_count). To recreate the results of this search, create a
+            breadth first searcher on the same graph starting at start_keys.
+            Then call next() (or next_with_ghosts()) repeatedly, and on every
+            result, call stop_searching_any on any keys from the exclude_keys
+            set. The revision_count value acts as a trivial cross-check - the
+            found revisions of the new search should have as many elements as
            revision_count. If it does not, then additional revisions have been
            ghosted since the search was executed the first time and the second

        return self._keys

+    def is_empty(self):
+        """Return false if the search lists 1 or more revisions."""
+        return self._recipe[3] == 0
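
An illustrative sketch of the new recipe shape (not part of the diff; the
keys are invented). The leading 'search' tag is what distinguishes a plain
SearchResult recipe from the 'proxy-search' recipe introduced further down:

    from bzrlib.graph import SearchResult

    result = SearchResult(set(['tip']), set(['ghost-1']), 3,
                          ['tip', 'mid', 'base'])
    result.get_recipe()   # -> ('search', set(['tip']), set(['ghost-1']), 3)
    result.is_empty()     # -> False, the recipe's key count is 3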

+    def refine(self, seen, referenced):
+        """Create a new search by refining this search.
+
+        :param seen: Revisions that have been satisfied.
+        :param referenced: Revision references observed while satisfying some
+            of this search.
+        """
+        start = self._recipe[1]
+        exclude = self._recipe[2]
+        count = self._recipe[3]
+        keys = self.get_keys()
+        # New heads = referenced + old heads - seen things - exclude
+        pending_refs = set(referenced)
+        pending_refs.update(start)
+        pending_refs.difference_update(seen)
+        pending_refs.difference_update(exclude)
+        # New exclude = old exclude + satisfied heads
+        seen_heads = start.intersection(seen)
+        exclude.update(seen_heads)
+        # keys gets seen removed
+        keys = keys - seen
+        # length is reduced by len(seen)
+        count -= len(seen)
+        return SearchResult(pending_refs, exclude, count, keys)
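
Continuing the sketch (invented keys, not part of the diff): refine() narrows
a result once some revisions have been satisfied. Satisfied heads move to the
exclude set, newly referenced revisions (plus any unsatisfied old heads)
become the new start keys, and the count drops by the number seen:

    from bzrlib.graph import SearchResult

    result = SearchResult(set(['tip']), set(['ghost-1']), 3,
                          ['tip', 'mid', 'base'])
    narrower = result.refine(seen=set(['tip']), referenced=set(['mid']))
    narrower.get_recipe()
    # -> ('search', set(['mid']), set(['ghost-1', 'tip']), 2)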

+class PendingAncestryResult(object):
+    """A search result that will reconstruct the ancestry for some graph heads.
+
+    Unlike SearchResult, this doesn't hold the complete search result in
+    memory, it just holds a description of how to generate it.
+    """
+
+    def __init__(self, heads, repo):
+        """Constructor.
+
+        :param heads: an iterable of graph heads.
+        :param repo: a repository to use to generate the ancestry for the given
+            heads.
+        """
+        self.heads = frozenset(heads)
+        self.repo = repo
+
+    def get_recipe(self):
+        """Return a recipe that can be used to replay this search.
+
+        The recipe allows reconstruction of the same results at a later date.
+
+        :seealso SearchResult.get_recipe:
+
+        :return: A tuple ('proxy-search', start_keys_set, set(), -1)
+            To recreate this result, create a PendingAncestryResult with the
+            start_keys_set.
+        """
+        return ('proxy-search', self.heads, set(), -1)
+
+    def get_keys(self):
+        """See SearchResult.get_keys.
+
+        Returns all the keys for the ancestry of the heads, excluding
+        NULL_REVISION.
+        """
+        return self._get_keys(self.repo.get_graph())
+
+    def _get_keys(self, graph):
+        NULL_REVISION = revision.NULL_REVISION
+        keys = [key for (key, parents) in graph.iter_ancestry(self.heads)
+                if key != NULL_REVISION and parents is not None]
+        return keys
+
+    def is_empty(self):
+        """Return false if the search lists 1 or more revisions."""
+        if revision.NULL_REVISION in self.heads:
+            return len(self.heads) == 1
+        else:
+            return len(self.heads) == 0
+
+    def refine(self, seen, referenced):
+        """Create a new search by refining this search.
+
+        :param seen: Revisions that have been satisfied.
+        :param referenced: Revision references observed while satisfying some
+            of this search.
+        """
+        referenced = self.heads.union(referenced)
+        return PendingAncestryResult(referenced - seen, self.repo)
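
A small sketch of the proxy recipe (not part of the diff; 'repo' is assumed
to be an open Repository that has a revision 'tip'):

    from bzrlib.graph import PendingAncestryResult

    pending = PendingAncestryResult(['tip'], repo)
    pending.get_recipe()  # -> ('proxy-search', frozenset(['tip']), set(), -1)
    pending.get_keys()    # walks repo's graph: all ancestors of 'tip', minus
                          # null: and ghosts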

def collapse_linear_regions(parent_map):
    """Collapse regions of the graph that are 'linear'.

            removed.add(node)

+class GraphThunkIdsToKeys(object):
+    """Forwards calls about 'ids' to be about keys internally."""
+
+    def __init__(self, graph):
+        self._graph = graph
+
+    def topo_sort(self):
+        return [r for (r,) in self._graph.topo_sort()]
+
+    def heads(self, ids):
+        """See Graph.heads()"""
+        as_keys = [(i,) for i in ids]
+        head_keys = self._graph.heads(as_keys)
+        return set([h[0] for h in head_keys])
+
+    def merge_sort(self, tip_revision):
+        return self._graph.merge_sort((tip_revision,))
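
An illustrative use (not part of the diff): wrap a graph that is keyed by
('revision-id',) 1-tuples and offers heads(), topo_sort() and merge_sort(),
for example a KnownGraph, so that callers can keep passing plain revision
ids. 'key_graph' below is such an assumed graph:

    thunked = GraphThunkIdsToKeys(key_graph)
    thunked.heads(['rev-2', 'rev-1'])  # -> set(['rev-2']) when rev-1 precedes rev-2
    thunked.merge_sort('rev-2')        # forwarded to key_graph as ('rev-2',)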

+_counters = [0,0,0,0,0,0,0]
+try:
+    from bzrlib._known_graph_pyx import KnownGraph
+except ImportError, e:
+    osutils.failed_to_load_extension(e)
+    from bzrlib._known_graph_py import KnownGraph