/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/graph.py

  • Committer: John Arbash Meinel
  • Date: 2008-04-25 17:57:27 UTC
  • mto: This revision was merged to the branch mainline in revision 3407.
  • Revision ID: john@arbash-meinel.com-20080425175727-ll3u7g7qqegvy8us
try collapsing the common revisions into a single searcher.

Show diffs side-by-side

added

removed

Lines of Context:
286
286
        common_searcher.stop_searching_any(
287
287
            common_searcher.find_seen_ancestors(ancestor_all_unique))
288
288
 
 
289
        # Collapse all the common nodes into a single searcher
 
290
        all_unique_searcher = self._make_breadth_first_searcher(ancestor_all_unique)
 
291
        next_unique_searchers = []
 
292
        for searcher in unique_searchers:
 
293
            searcher.stop_searching_any(
 
294
                searcher.find_seen_ancestors(ancestor_all_unique))
 
295
            if searcher._next_query:
 
296
                next_unique_searchers.append(searcher)
 
297
        trace.mutter('Collapsed %s unique searchers into 1 + %s',
 
298
                     len(unique_searchers), len(next_unique_searchers))
 
299
        unique_searchers = next_unique_searchers
 
300
 
289
301
        # While we still have common nodes to search
290
302
        while common_searcher._next_query:
291
303
            newly_seen_common = set(common_searcher.step())
297
309
            for searcher in unique_searchers:
298
310
                unique_are_common_nodes = unique_are_common_nodes.intersection(
299
311
                                            searcher.seen)
 
312
            unique_are_common_nodes.update(all_unique_searcher.step())
 
313
            # TODO: I think we can just use ancestor_all_unique.seen, instead of
 
314
            #       ancestor_all_unique
300
315
            ancestor_all_unique.update(unique_are_common_nodes)
301
316
            if newly_seen_common:
302
317
                # If a 'common' node is an ancestor of all unique searchers, we
305
320
                    ancestor_all_unique.intersection(newly_seen_common))
306
321
            if unique_are_common_nodes:
307
322
                # We have new common-to-all-unique-searchers nodes
 
323
                unique_are_common_nodes.update(
 
324
                    all_unique_searcher.find_seen_ancestors(unique_are_common_nodes))
308
325
                for searcher in unique_searchers:
309
326
                    unique_are_common_nodes.update(
310
327
                        searcher.find_seen_ancestors(unique_are_common_nodes))
313
330
                unique_are_common_nodes.update(
314
331
                    common_searcher.find_seen_ancestors(unique_are_common_nodes))
315
332
 
316
 
                # We can tell all of the unique searchers to start at these
317
 
                # nodes, and tell all of the common searchers to *stop*
318
 
                # searching these nodes
 
333
                # The all_unique searcher can start searching the common nodes
 
334
                # but everyone else can stop.
 
335
                all_unique_searcher.start_searching(unique_are_common_nodes)
319
336
                for searcher in unique_searchers:
320
 
                    searcher.start_searching(unique_are_common_nodes)
 
337
                    searcher.stop_searching_any(unique_are_common_nodes)
321
338
                common_searcher.stop_searching_any(unique_are_common_nodes)
322
339
 
323
340
                # Filter out searchers that don't actually search different
326
343
                unique_search_sets = set()
327
344
                for searcher in unique_searchers:
328
345
                    will_search_set = frozenset(searcher._next_query)
329
 
                    if will_search_set not in unique_search_sets:
 
346
                    if (will_search_set
 
347
                        and will_search_set not in unique_search_sets):
330
348
                        # This searcher is searching a unique set of nodes, let it
331
349
                        unique_search_sets.add(will_search_set)
332
350
                        next_unique_searchers.append(searcher)