/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/_annotator_py.py

  • Committer: Jelmer Vernooij
  • Date: 2018-02-18 21:42:57 UTC
  • mto: This revision was merged to the branch mainline in revision 6859.
  • Revision ID: jelmer@jelmer.uk-20180218214257-jpevutp1wa30tz3v
Update TODO to reference Breezy, not Bazaar.

@@ old 16 / new 16 @@
 
 """Functionality for doing annotations in the 'optimal' way"""
 
+from __future__ import absolute_import
+
 from .lazy_import import lazy_import
 lazy_import(globals(), """
-
-import patiencediff
-
 from breezy import (
     annotate, # Must be lazy to avoid circular importing
     graph as _mod_graph,
+    patiencediff,
     )
 """)
 from . import (
     osutils,
     ui,
     )
+from .sixish import (
+    range,
+    viewitems,
+    )
 
 
 class Annotator(object):
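The lazy_import block in this hunk defers the real import of breezy.annotate (and the other names in the triple-quoted string) until first use, which is what the "# Must be lazy to avoid circular importing" comment refers to. As a rough, hypothetical sketch of the idea (not Breezy's actual lazy_import machinery), the effect is comparable to a proxy object that imports on first attribute access:

import importlib


class _LazyModule(object):
    """Illustrative proxy: import the named module on first attribute access."""

    def __init__(self, name):
        self._name = name
        self._mod = None

    def __getattr__(self, attr):
        if self._mod is None:
            self._mod = importlib.import_module(self._name)
        return getattr(self._mod, attr)


# No import happens here, so a cycle between this module and breezy.annotate
# cannot bite at import time; the real import runs on first use of the name.
annotate = _LazyModule('breezy.annotate')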
@@ old 89 / new 93 @@
                     vf_keys_needed.add(key)
             needed_keys = set()
             next_parent_map.update(self._vf.get_parent_map(parent_lookup))
-            for key, parent_keys in next_parent_map.items():
-                if parent_keys is None:  # No graph versionedfile
+            for key, parent_keys in viewitems(next_parent_map):
+                if parent_keys is None: # No graph versionedfile
                     parent_keys = ()
                     next_parent_map[key] = ()
                 self._update_needed_children(key, parent_keys)
                 needed_keys.update([key for key in parent_keys
-                                    if key not in parent_map])
+                                         if key not in parent_map])
             parent_map.update(next_parent_map)
-            # _heads_provider does some graph caching, so it is only valid
-            # while self._parent_map hasn't changed
+            # _heads_provider does some graph caching, so it is only valid while
+            # self._parent_map hasn't changed
             self._heads_provider = None
         return vf_keys_needed, ann_keys_needed
 
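The loop in this hunk is part of the key-gathering step: it repeatedly asks the versionedfile for parent maps and queues any parent keys it has not seen yet, so that every ancestor needed for annotation ends up in the result set. A simplified, self-contained sketch of that expansion (expand_needed_keys and get_parent_map below are hypothetical stand-ins, get_parent_map playing the role of self._vf.get_parent_map):

def expand_needed_keys(start_key, get_parent_map):
    """Walk the parent graph until every ancestor key is known."""
    parent_map = {}
    needed_keys = {start_key}
    all_needed = set()
    while needed_keys:
        next_parent_map = get_parent_map(needed_keys)
        all_needed.update(needed_keys)
        needed_keys = set()
        for key, parent_keys in next_parent_map.items():
            if parent_keys is None:  # versionedfile without a graph
                parent_keys = ()
            needed_keys.update(k for k in parent_keys if k not in parent_map)
        parent_map.update(next_parent_map)
    return all_needed


# A tiny linear history: revision 'c' modifies 'b', which modifies 'a'.
graph = {'c': ('b',), 'b': ('a',), 'a': ()}
print(expand_needed_keys('c', lambda keys: {k: graph[k] for k in keys}))
# {'a', 'b', 'c'} (set ordering may vary)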
@@ old 114 / new 118 @@
         keys, ann_keys = self._get_needed_keys(key)
         if pb is not None:
             pb.update('getting stream', 0, len(keys))
-        stream = self._vf.get_record_stream(keys, 'topological', True)
+        stream  = self._vf.get_record_stream(keys, 'topological', True)
         for idx, record in enumerate(stream):
             if pb is not None:
                 pb.update('extracting', 0, len(keys))
             if record.storage_kind == 'absent':
                 raise errors.RevisionNotPresent(record.key, self._vf)
             this_key = record.key
-            lines = record.get_bytes_as('lines')
+            lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
             num_lines = len(lines)
             self._text_cache[this_key] = lines
             yield this_key, lines, num_lines
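On the '+' side of this hunk the record text is fetched as the 'chunked' storage kind and normalised with osutils.chunks_to_lines, whereas the '-' side asks for 'lines' directly. A naive illustration of what that conversion does (illustrative only; Breezy's real helper is optimised):

def chunks_to_lines(chunks):
    # Naive sketch: join arbitrary byte chunks, then re-split into
    # newline-terminated lines, which is the shape the annotator caches.
    return b''.join(chunks).splitlines(True)


print(chunks_to_lines([b'first li', b'ne\nsecond', b' line\n']))
# [b'first line\n', b'second line\n']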
@@ old 144 / new 148 @@
         parent_lines = self._text_cache[parent_key]
         parent_annotations = self._annotations_cache[parent_key]
         # PatienceSequenceMatcher should probably be part of Policy
-        matcher = patiencediff.PatienceSequenceMatcher(
-            None, parent_lines, text)
+        matcher = patiencediff.PatienceSequenceMatcher(None,
+            parent_lines, text)
         matching_blocks = matcher.get_matching_blocks()
         return parent_annotations, matching_blocks
 
@@ old 153 / new 157 @@
         """Reannotate this text relative to its first parent."""
         (parent_annotations,
          matching_blocks) = self._get_parent_annotations_and_matches(
-             key, lines, parent_key)
+                                key, lines, parent_key)
 
         for parent_idx, lines_idx, match_len in matching_blocks:
             # For all matching regions we copy across the parent annotations
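The comment on the last context line above describes the heart of the annotator: wherever the child text matches its parent, the parent's annotations are copied across, and only genuinely new or changed lines stay attributed to the child revision. Below is a small self-contained sketch of that step using the stdlib SequenceMatcher, which exposes the same get_matching_blocks() interface as patiencediff.PatienceSequenceMatcher; the keys and line contents are made up for illustration:

from difflib import SequenceMatcher

parent_key = ('file-id', 'rev-parent')
key = ('file-id', 'rev-child')
parent_lines = [b'a\n', b'b\n', b'c\n']
lines = [b'a\n', b'B\n', b'c\n']

# Every child line starts out attributed to the child revision itself.
parent_annotations = [(parent_key,)] * len(parent_lines)
annotations = [(key,)] * len(lines)

matcher = SequenceMatcher(None, parent_lines, lines)
for parent_idx, lines_idx, match_len in matcher.get_matching_blocks():
    # For all matching regions, copy across the parent annotations.
    annotations[lines_idx:lines_idx + match_len] = \
        parent_annotations[parent_idx:parent_idx + match_len]

print(annotations)
# [(('file-id', 'rev-parent'),), (('file-id', 'rev-child'),), (('file-id', 'rev-parent'),)]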
@@ old 165 / new 169 @@
         """Reannotate this text relative to a second (or more) parent."""
         (parent_annotations,
          matching_blocks) = self._get_parent_annotations_and_matches(
-             key, lines, parent_key)
+                                key, lines, parent_key)
 
         last_ann = None
         last_parent = None
@@ old 257 / new 261 @@
                         each key is a possible source for the given line.
             lines the text of "key" as a list of lines
         """
-        with ui.ui_factory.nested_progress_bar() as pb:
-            for text_key, text, num_lines in self._get_needed_texts(
-                    key, pb=pb):
+        pb = ui.ui_factory.nested_progress_bar()
+        try:
+            for text_key, text, num_lines in self._get_needed_texts(key, pb=pb):
                 self._annotate_one(text_key, text, num_lines)
+        finally:
+            pb.finished()
         try:
             annotations = self._annotations_cache[key]
         except KeyError:
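This hunk trades the context-manager form of the progress bar for an explicit try/finally with pb.finished(); both shapes guarantee the bar is cleaned up even if annotation raises. A tiny sketch with a stand-in progress bar (the real one comes from ui.ui_factory.nested_progress_bar()):

class FakeProgressBar(object):
    """Stand-in for the object nested_progress_bar() returns (illustrative only)."""

    def update(self, msg, cur=None, total=None):
        print('update:', msg)

    def finished(self):
        print('finished')

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.finished()
        return False


# try/finally form, as on the '+' side:
pb = FakeProgressBar()
try:
    pb.update('annotating')
finally:
    pb.finished()

# context-manager form, as on the '-' side:
with FakeProgressBar() as pb:
    pb.update('annotating')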
@@ old 303 / new 309 @@
             else:
                 the_heads = heads(annotation)
                 if len(the_heads) == 1:
-                    for head in the_heads:
-                        break  # get the item out of the set
+                    for head in the_heads: break # get the item out of the set
                 else:
                     head = self._resolve_annotation_tie(the_heads, line,
                                                         custom_tiebreaker)
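The single-statement "for head in the_heads: break" on the '+' side is just the compressed form of the two-line loop it replaces: both pull the only element out of a one-element set without copying it. For example (the key below is made up):

the_heads = {('file-id', 'rev-a')}

for head in the_heads:
    break  # get the item out of the set without building a list

assert head == next(iter(the_heads))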