/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/_annotator_py.py

  • Committer: Jelmer Vernooij
  • Date: 2019-05-29 03:22:34 UTC
  • mfrom: (7303 work)
  • mto: This revision was merged to the branch mainline in revision 7306.
  • Revision ID: jelmer@jelmer.uk-20190529032234-mt3fuws8gq03tapi
Merge trunk.

@@ -20,10 +20,12 @@
 
 from .lazy_import import lazy_import
 lazy_import(globals(), """
+
+import patiencediff
+
 from breezy import (
     annotate, # Must be lazy to avoid circular importing
     graph as _mod_graph,
-    patiencediff,
     )
 """)
 from . import (
@@ -94,15 +96,15 @@
             needed_keys = set()
             next_parent_map.update(self._vf.get_parent_map(parent_lookup))
             for key, parent_keys in viewitems(next_parent_map):
-                if parent_keys is None: # No graph versionedfile
+                if parent_keys is None:  # No graph versionedfile
                     parent_keys = ()
                     next_parent_map[key] = ()
                 self._update_needed_children(key, parent_keys)
                 needed_keys.update([key for key in parent_keys
-                                         if key not in parent_map])
+                                    if key not in parent_map])
             parent_map.update(next_parent_map)
-            # _heads_provider does some graph caching, so it is only valid while
-            # self._parent_map hasn't changed
+            # _heads_provider does some graph caching, so it is only valid
+            # while self._parent_map hasn't changed
             self._heads_provider = None
         return vf_keys_needed, ann_keys_needed
 
@@ -118,7 +120,7 @@
         keys, ann_keys = self._get_needed_keys(key)
         if pb is not None:
             pb.update('getting stream', 0, len(keys))
-        stream  = self._vf.get_record_stream(keys, 'topological', True)
+        stream = self._vf.get_record_stream(keys, 'topological', True)
         for idx, record in enumerate(stream):
             if pb is not None:
                 pb.update('extracting', 0, len(keys))
@@ -148,8 +150,8 @@
         parent_lines = self._text_cache[parent_key]
         parent_annotations = self._annotations_cache[parent_key]
         # PatienceSequenceMatcher should probably be part of Policy
-        matcher = patiencediff.PatienceSequenceMatcher(None,
-            parent_lines, text)
+        matcher = patiencediff.PatienceSequenceMatcher(
+            None, parent_lines, text)
         matching_blocks = matcher.get_matching_blocks()
         return parent_annotations, matching_blocks
 
@@ -157,7 +159,7 @@
         """Reannotate this text relative to its first parent."""
         (parent_annotations,
          matching_blocks) = self._get_parent_annotations_and_matches(
-                                key, lines, parent_key)
+             key, lines, parent_key)
 
         for parent_idx, lines_idx, match_len in matching_blocks:
             # For all matching regions we copy across the parent annotations
@@ -169,7 +171,7 @@
         """Reannotate this text relative to a second (or more) parent."""
         (parent_annotations,
          matching_blocks) = self._get_parent_annotations_and_matches(
-                                key, lines, parent_key)
+             key, lines, parent_key)
 
         last_ann = None
         last_parent = None
@@ -262,7 +264,8 @@
             lines the text of "key" as a list of lines
         """
         with ui.ui_factory.nested_progress_bar() as pb:
-            for text_key, text, num_lines in self._get_needed_texts(key, pb=pb):
+            for text_key, text, num_lines in self._get_needed_texts(
+                    key, pb=pb):
                 self._annotate_one(text_key, text, num_lines)
         try:
             annotations = self._annotations_cache[key]
@@ -306,7 +309,8 @@
             else:
                 the_heads = heads(annotation)
                 if len(the_heads) == 1:
-                    for head in the_heads: break # get the item out of the set
+                    for head in the_heads:
+                        break  # get the item out of the set
                 else:
                     head = self._resolve_annotation_tie(the_heads, line,
                                                         custom_tiebreaker)
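
The only functional change in this diff is the first hunk: the annotator now imports the external patiencediff module rather than breezy's own copy; the remaining hunks are whitespace and line-wrapping cleanups. A minimal sketch of the matcher API that _get_parent_annotations_and_matches relies on, using made-up input lines purely for illustration:

    import patiencediff

    # Stand-ins for a parent text and its child text, as lists of lines.
    parent_lines = [b'first\n', b'second\n', b'third\n']
    text = [b'first\n', b'changed\n', b'third\n']

    # Same constructor signature as difflib.SequenceMatcher: (junk, a, b).
    matcher = patiencediff.PatienceSequenceMatcher(None, parent_lines, text)

    # get_matching_blocks() returns (parent_idx, lines_idx, match_len)
    # triples; these are the matching regions over which the annotator
    # copies parent annotations across, ending with a zero-length sentinel.
    for parent_idx, lines_idx, match_len in matcher.get_matching_blocks():
        print(parent_idx, lines_idx, match_len)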