/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/_annotator_py.py

  • Committer: Robert Collins
  • Date: 2010-05-06 23:41:35 UTC
  • mto: This revision was merged to the branch mainline in revision 5223.
  • Revision ID: robertc@robertcollins.net-20100506234135-yivbzczw1sejxnxc
Lock methods on ``Tree``, ``Branch`` and ``Repository`` are now
expected to return an object which can be used to unlock them. This reduces
duplicate code when using cleanups. The previous tokens returned by
``Branch.lock_write`` and ``Repository.lock_write`` are now attributes
on the result of the lock_write. ``repository.RepositoryWriteLockResult``
and ``branch.BranchWriteLockResult`` document this. (Robert Collins)
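
In practice this lets lock-and-cleanup code collapse to a single line. A
minimal sketch of the pattern (the ``branch`` object and the ``add_cleanup``
registrar are assumed to exist, e.g. from ``bzrlib.cleanup``)::

    # Old style: Branch.lock_write() returned the lock token directly and
    # unlocking was a separate call.
    token = branch.lock_write()
    try:
        pass  # work on the locked branch
    finally:
        branch.unlock()

    # New style: the result of lock_write() carries an .unlock callable,
    # so it can be handed straight to a cleanup registry in one line.
    # The old token is now an attribute on the result object (documented
    # by ``branch.BranchWriteLockResult``).
    add_cleanup(branch.lock_write().unlock)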

``log._get_info_for_log_files`` now takes an add_cleanup callable.
(Robert Collins)
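
For example, a caller can now pass its cleanup registrar straight in (a
sketch; ``do_log`` and the argument names are illustrative, and the
four-value unpacking follows the function's docstring at this revision)::

    from bzrlib import cleanup, log

    def do_log(op, revisionspec_list, file_list):
        # _get_info_for_log_files registers its branch unlock via the
        # supplied add_cleanup callable, so the caller no longer has to
        # unlock explicitly.
        b, info_list, rev1, rev2 = log._get_info_for_log_files(
            revisionspec_list, file_list, op.add_cleanup)
        # ... use b, info_list, rev1, rev2 ...

    op = cleanup.OperationWithCleanups(do_log)
    op.run(revisionspec_list, file_list)  # cleanups fire when run() exits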

=== modified file 'bzrlib/_annotator_py.py'
@@ -1,4 +1,4 @@
-# Copyright (C) 2009 Canonical Ltd
+# Copyright (C) 2009, 2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -16,17 +16,27 @@
 
 """Functionality for doing annotations in the 'optimal' way"""
 
-from bzrlib.lazy_import import lazy_import
+from __future__ import absolute_import
+
+from .lazy_import import lazy_import
 lazy_import(globals(), """
-from bzrlib import annotate # Must be lazy to avoid circular importing
+
+import patiencediff
+
+from breezy import (
+    annotate, # Must be lazy to avoid circular importing
+    graph as _mod_graph,
+    )
 """)
-from bzrlib import (
+from . import (
     errors,
-    graph as _mod_graph,
     osutils,
-    patiencediff,
     ui,
     )
+from .sixish import (
+    range,
+    viewitems,
+    )
 
 
 class Annotator(object):
@@ -66,7 +76,7 @@
         self._num_needed_children[key] = 1
         vf_keys_needed = set()
         ann_keys_needed = set()
-        needed_keys = set([key])
+        needed_keys = {key}
         while needed_keys:
             parent_lookup = []
             next_parent_map = {}
@@ -85,16 +95,16 @@
                     vf_keys_needed.add(key)
             needed_keys = set()
             next_parent_map.update(self._vf.get_parent_map(parent_lookup))
-            for key, parent_keys in next_parent_map.iteritems():
-                if parent_keys is None: # No graph versionedfile
+            for key, parent_keys in viewitems(next_parent_map):
+                if parent_keys is None:  # No graph versionedfile
                     parent_keys = ()
                     next_parent_map[key] = ()
                 self._update_needed_children(key, parent_keys)
                 needed_keys.update([key for key in parent_keys
-                                         if key not in parent_map])
+                                    if key not in parent_map])
             parent_map.update(next_parent_map)
-            # _heads_provider does some graph caching, so it is only valid while
-            # self._parent_map hasn't changed
+            # _heads_provider does some graph caching, so it is only valid
+            # while self._parent_map hasn't changed
             self._heads_provider = None
         return vf_keys_needed, ann_keys_needed
 
@@ -110,14 +120,14 @@
         keys, ann_keys = self._get_needed_keys(key)
         if pb is not None:
            pb.update('getting stream', 0, len(keys))
-        stream  = self._vf.get_record_stream(keys, 'topological', True)
+        stream = self._vf.get_record_stream(keys, 'topological', True)
         for idx, record in enumerate(stream):
             if pb is not None:
                 pb.update('extracting', 0, len(keys))
             if record.storage_kind == 'absent':
                 raise errors.RevisionNotPresent(record.key, self._vf)
             this_key = record.key
-            lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
+            lines = record.get_bytes_as('lines')
             num_lines = len(lines)
             self._text_cache[this_key] = lines
             yield this_key, lines, num_lines
@@ -140,8 +150,8 @@
         parent_lines = self._text_cache[parent_key]
         parent_annotations = self._annotations_cache[parent_key]
         # PatienceSequenceMatcher should probably be part of Policy
-        matcher = patiencediff.PatienceSequenceMatcher(None,
-            parent_lines, text)
+        matcher = patiencediff.PatienceSequenceMatcher(
+            None, parent_lines, text)
         matching_blocks = matcher.get_matching_blocks()
         return parent_annotations, matching_blocks
 
@@ -149,7 +159,7 @@
         """Reannotate this text relative to its first parent."""
         (parent_annotations,
          matching_blocks) = self._get_parent_annotations_and_matches(
-                                key, lines, parent_key)
+             key, lines, parent_key)
 
         for parent_idx, lines_idx, match_len in matching_blocks:
             # For all matching regions we copy across the parent annotations
@@ -161,7 +171,7 @@
         """Reannotate this text relative to a second (or more) parent."""
         (parent_annotations,
          matching_blocks) = self._get_parent_annotations_and_matches(
-                                key, lines, parent_key)
+             key, lines, parent_key)
 
         last_ann = None
         last_parent = None
@@ -179,7 +189,7 @@
             par_sub = parent_annotations[parent_idx:parent_idx + match_len]
             if ann_sub == par_sub:
                 continue
-            for idx in xrange(match_len):
+            for idx in range(match_len):
                 ann = ann_sub[idx]
                 par_ann = par_sub[idx]
                 ann_idx = lines_idx + idx
@@ -253,12 +263,10 @@
                         each key is a possible source for the given line.
             lines the text of "key" as a list of lines
         """
-        pb = ui.ui_factory.nested_progress_bar()
-        try:
-            for text_key, text, num_lines in self._get_needed_texts(key, pb=pb):
+        with ui.ui_factory.nested_progress_bar() as pb:
+            for text_key, text, num_lines in self._get_needed_texts(
+                    key, pb=pb):
                 self._annotate_one(text_key, text, num_lines)
-        finally:
-            pb.finished()
         try:
             annotations = self._annotations_cache[key]
         except KeyError:
@@ -277,7 +285,7 @@
             # Backwards compatibility, break up the heads into pairs and
             # resolve the result
             next_head = iter(the_heads)
-            head = next_head.next()
+            head = next(next_head)
             for possible_head in next_head:
                 annotated_lines = ((head, line), (possible_head, line))
                 head = tiebreaker(annotated_lines)[0]
@@ -301,7 +309,8 @@
             else:
                 the_heads = heads(annotation)
                 if len(the_heads) == 1:
-                    for head in the_heads: break # get the item out of the set
+                    for head in the_heads:
+                        break  # get the item out of the set
                 else:
                     head = self._resolve_annotation_tie(the_heads, line,
                                                         custom_tiebreaker)
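
The annotation copying above is driven by
``PatienceSequenceMatcher.get_matching_blocks()``, which mirrors the
``difflib.SequenceMatcher`` API: ``_get_parent_annotations_and_matches``
returns the matching blocks, and the annotator copies parent annotations
across each matched region. A standalone illustration using the
``patiencediff`` package the new code imports (the example lines are
made up)::

    import patiencediff

    parent_lines = ['a\n', 'b\n', 'c\n']
    child_lines = ['a\n', 'x\n', 'c\n']
    matcher = patiencediff.PatienceSequenceMatcher(
        None, parent_lines, child_lines)
    # Each block is (parent_idx, child_idx, length); a zero-length
    # sentinel block terminates the list, as with difflib.
    for parent_idx, child_idx, length in matcher.get_matching_blocks():
        print(parent_idx, child_idx, length)
    # 0 0 1  (the shared 'a' line)
    # 2 2 1  (the shared 'c' line)
    # 3 3 0  (sentinel)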