# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Code to show logs of changes.

Various flavors of log can be produced:

* for one file, or the whole tree, and (not done yet) for
  files in a given directory

* in "verbose" mode with a description of what changed from one
  version to the next

* with file-ids and revision-ids shown

Logs are actually written out through an abstract LogFormatter
interface, which allows for different preferred formats.  Plugins can
register formats too.

Logs can be produced in either forward (oldest->newest) or reverse
(newest->oldest) order.

Logs can be filtered to show only revisions matching a particular
search string, or within a particular range of revisions.  The range
can be given as date/times, which are reduced to revisions before
calling in.

In verbose mode we show a summary of what changed in each particular
revision.  Note that this is the delta for changes in that revision
relative to its left-most parent, not the delta relative to the last
logged revision.  So for example if you ask for a verbose log of
changes touching hello.c you will get a list of those revisions also
listing other things that were changed in the same revision, but not
all the changes since the previous revision that touched hello.c.
"""

from __future__ import absolute_import

import itertools
import re
import sys
from io import BytesIO

from warnings import (
    warn,
    )

from .lazy_import import lazy_import
lazy_import(globals(), """
    revision as _mod_revision,
from breezy.i18n import gettext, ngettext

from .osutils import (
    format_date,
    format_date_with_offset_in_original_timezone,
    get_diff_header_encoding,
    get_terminal_encoding,
    terminal_width,
    )
from .tree import find_previous_path


def find_touching_revisions(repository, last_revision, last_tree, last_path):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    TODO: Perhaps some way to limit this to only particular revisions,
    or to traverse a non-mainline set of revisions?
    """
    last_verifier = last_tree.get_file_verifier(last_path)
    graph = repository.get_graph()
    history = list(graph.iter_lefthand_ancestry(last_revision, []))
    revno = len(history)
    for revision_id in history:
        this_tree = repository.revision_tree(revision_id)
        this_path = find_previous_path(last_tree, this_tree, last_path)

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?
        if this_path is not None and last_path is None:
            yield revno, revision_id, "deleted " + this_path
            this_verifier = this_tree.get_file_verifier(this_path)
        elif this_path is None and last_path is not None:
            yield revno, revision_id, "added " + last_path
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
            this_verifier = this_tree.get_file_verifier(this_path)
        else:
            this_verifier = this_tree.get_file_verifier(this_path)
            if (this_verifier != last_verifier):
                yield revno, revision_id, "modified " + this_path

        last_verifier = this_verifier
        last_path = this_path
        last_tree = this_tree
        if last_path is None:
            return
        revno -= 1


def show_log(branch,
             lf,
             verbose=False,
             direction='reverse',
             start_revision=None,
             end_revision=None,
             search=None,
             limit=None,
             show_diff=False,
             match=None):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.

    :param verbose: If True show added/changed/deleted/renamed files.

    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.

    :param start_revision: If not None, only show revisions >= start_revision

    :param end_revision: If not None, only show revisions <= end_revision

    :param search: If not None, only show revisions with matching commit
        messages

    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.

    :param show_diff: If True, output a diff after each revision.

    :param match: Dictionary of search lists to use when matching revision
        properties.
    """
    if verbose:
        delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        diff_type = 'full'
    else:
        diff_type = None

    if isinstance(start_revision, int):
        try:
            start_revision = revisionspec.RevisionInfo(branch, start_revision)
        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
            raise errors.InvalidRevisionNumber(start_revision)

    if isinstance(end_revision, int):
        try:
            end_revision = revisionspec.RevisionInfo(branch, end_revision)
        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
            raise errors.InvalidRevisionNumber(end_revision)

    if end_revision is not None and end_revision.revno == 0:
        raise errors.InvalidRevisionNumber(end_revision.revno)

    # Build the request and execute it
    rqst = make_log_request_dict(
        direction=direction,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
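

# Illustrative usage sketch (comments only, not executed by this module):
# the preferred, non-deprecated way to produce a log is to build a request
# with make_log_request_dict() and hand it to Logger.  'my_branch' here is
# an assumed, already-opened Branch object.
#
#     import sys
#     lf = LongLogFormatter(to_file=sys.stdout)
#     rqst = make_log_request_dict(limit=10, levels=1)
#     Logger(my_branch, rqst).show(lf)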


# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'levels': None,
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }


def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False, omit_merges=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
        'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
        affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
        revisions >= start_revision

    :param end_revision: If not None, only generate
        revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions, all revisions
        are shown if None or 0.

    :param message_search: If not None, only include revisions with
        matching commit messages

    :param levels: the number of levels of revisions to
        generate; 1 for just the mainline; 0 for all levels, or None for
        a sensible default.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
        'full' means generate the complete delta - adds/deletes/modifies/etc;
        'partial' means filter the delta using specific_fileids;
        None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
        'full' means generate the complete diff - adds/deletes/modifies/etc;
        'partial' means filter the diff using specific_fileids;
        None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
        algorithm used for matching specific_fileids. This parameter
        may be removed in the future so breezy client code should NOT
        use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
        range operator or as a graph difference.

    :param signature: show digital signature information

    :param match: Dictionary of list of search strings to use when filtering
        revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
        the empty string to match any of the preceding properties.

    :param omit_merges: If True, commits with more than one parent are
        omitted.
    """
    # Take care of old style message_search parameter
    if message_search:
        if match:
            if 'message' in match:
                match['message'].append(message_search)
            else:
                match['message'] = [message_search]
        else:
            match = {'message': [message_search]}
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'match': match,
        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
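

# Illustrative sketch of the 'match' parameter (the values below are made up
# for the example): each key filters on one revision property, and the
# empty-string key matches against any of them.
#
#     rqst = make_log_request_dict(
#         match={'author': ['jrandom@example.com'],
#                'message': ['fix', 'bug']},
#         levels=0)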


def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result


def format_signature_validity(rev_id, branch):
    """get the signature validity

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log
    """
    from breezy import gpg

    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"


class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)


class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Create a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
            See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)

    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)

        with self.branch.lock_read():
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
            # user didn't specify levels, use whatever the LF can handle:
            rqst['levels'] = lf.get_levels()

        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
        if not getattr(lf, 'supports_signatures', False):
            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        try:
            for lr in generator.iter_log_revisions():
                lf.log_revision(lr)
        except errors.GhostRevisionUnusableHere:
            raise errors.BzrCommandError(
                gettext('Further revision history missing.'))
        lf.show_advice()

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
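

# Hedged sketch (a hypothetical subclass, not part of this module): Logger
# documents _generator_factory() as the point subclasses may override, so a
# plugin wanting different revision selection could plug in its own
# LogGenerator there.
#
#     class MyLogger(Logger):
#         def _generator_factory(self, branch, rqst):
#             # Return any LogGenerator implementation here; the base class
#             # simply returns _DefaultLogGenerator(branch, rqst).
#             return _DefaultLogGenerator(branch, rqst)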


class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        show_signature = rqst.get('signature')
        omit_merges = rqst.get('omit_merges')
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if (levels != 0 and merge_depth is not None and
                        merge_depth >= levels):
                    continue
                if omit_merges and len(rev.parent_ids) > 1:
                    continue
                if rev is None:
                    raise errors.GhostRevisionUnusableHere(rev_id)
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                if show_signature:
                    signature = format_signature_validity(rev_id, self.branch)
                else:
                    signature = None
                yield LogRevision(
                    rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff, signature)
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return

    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = BytesIO()
        path_encoding = get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
                             new_label='', path_encoding=path_encoding)
        return s.getvalue()

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise errors.BzrError(
                    "illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
            rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
                                     rqst.get('delta_type'), rqst.get('match'),
                                     file_ids=rqst.get('specific_fileids'),
                                     direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(
            self.branch, rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
                                     rqst.get('delta_type'), rqst.get('match'))
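

# Note on the two iterator strategies above (a summary of the behaviour in
# _create_log_revision_iterator, not new behaviour): the delta-matching path
# works for any number of files because it filters each revision's delta,
# while the per-file-graph path only makes sense for exactly one file, which
# is why any other file_count raises BzrError.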


def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
                         ):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
        a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(gettext(
            '--exclude-common-ancestry requires two different revisions'))
    if direction not in ('reverse', 'forward'):
        raise ValueError(gettext('invalid direction %r') % direction)
    br_rev_id = branch.last_revision()
    if br_rev_id == _mod_revision.NULL_REVISION:
        return []

    if (end_rev_id and start_rev_id == end_rev_id
            and (not generate_merge_revisions
                 or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        return _generate_one_revision(branch, end_rev_id, br_rev_id,
                                      branch.revno())
    if not generate_merge_revisions:
        try:
            # If we only want to see linear revisions, we can iterate ...
            iter_revs = _linear_view_revisions(
                branch, start_rev_id, end_rev_id,
                exclude_common_ancestry=exclude_common_ancestry)
            # If a start limit was given and it's not obviously an
            # ancestor of the end limit, check it before outputting anything
            if (direction == 'forward'
                    or (start_rev_id and not _is_obvious_ancestor(
                        branch, start_rev_id, end_rev_id))):
                iter_revs = list(iter_revs)
            if direction == 'forward':
                iter_revs = reversed(iter_revs)
            return iter_revs
        except _StartNotLinearAncestor:
            # Switch to the slower implementation that may be able to find a
            # non-obvious ancestor out of the left-hand history.
            pass
    iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                        direction, delayed_graph_generation,
                                        exclude_common_ancestry)
    if direction == 'forward':
        iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
    return iter_revs


def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        # It's the tip
        return [(br_rev_id, br_revno, 0)]
    else:
        revno_str = _compute_revno_str(branch, rev_id)
        return [(rev_id, revno_str, 0)]


def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    # The above is only true for old formats (<= 0.92), for newer formats, a
    # couple of seconds only should be needed to load the whole graph and the
    # other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept to show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                            and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    # adjust end_rev_id.
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError(gettext('Start revision not found in'
                                                 ' history of end revision.'))

    # We exit the loop above because we encounter a revision with merges, from
    # this revision, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = itertools.chain(iter(initial_revisions),
                                     _graph_view_revisions(branch, start_rev_id, end_rev_id,
                                                           rebase_initial_depths=(
                                                               direction == 'reverse'),
                                                           exclude_common_ancestry=exclude_common_ancestry))
    return view_revisions


def _has_merges(branch, rev_id):
    """Does a revision have multiple parents or not?"""
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _compute_revno_str(branch, rev_id):
    """Compute the revno string from a rev_id.

    :return: The revno string, or None if the revision is not in the supplied
        branch.
    """
    try:
        revno = branch.revision_id_to_dotted_revno(rev_id)
    except errors.NoSuchRevision:
        # The revision must be outside of this branch
        return None
    else:
        return '.'.join(str(n) for n in revno)


def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        try:
            start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
            end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        except errors.NoSuchRevision:
            # one or both is not in the branch; not obvious
            return False
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
        elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
              start_dotted[0:1] == end_dotted[0:1]):
            # both on same development line
            return start_dotted[2] <= end_dotted[2]
        else:
            # not obvious
            return False
    # if either start or end is not specified then we use either the first or
    # the last revision and *they* are obvious ancestors.
    return True
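

# Worked example (revnos invented for illustration): start revno (3,) and
# end revno (7,) are both mainline, so 3 <= 7 makes the ancestry "obvious".
# Dotted revnos (3, 1, 2) and (3, 1, 5) lie on the same development line, so
# 2 <= 5 is compared.  (3, 1, 2) against (4, 2, 1) is not obvious, and the
# caller falls back to walking the graph.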


def _linear_view_revisions(branch, start_rev_id, end_rev_id,
                           exclude_common_ancestry=False):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param exclude_common_ancestry: Whether the start_rev_id should be part of
        the iterated revisions.
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
        dotted_revno will be None for ghosts
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
        is not found walking the left-hand history
    """
    repo = branch.repository
    graph = repo.get_graph()
    if start_rev_id is None and end_rev_id is None:
        try:
            br_revno, br_rev_id = branch.last_revision_info()
        except errors.GhostRevisionsHaveNoRevno:
            br_rev_id = branch.last_revision()
            cur_revno = None
        else:
            cur_revno = br_revno
        graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
                                                  (_mod_revision.NULL_REVISION,))
        while True:
            try:
                revision_id = next(graph_iter)
            except errors.RevisionNotPresent as e:
                # Oops, a ghost.
                yield e.revision_id, None, None
                break
            except StopIteration:
                break
            else:
                yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
                if cur_revno is not None:
                    cur_revno -= 1
    else:
        br_rev_id = branch.last_revision()
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
                                                  (_mod_revision.NULL_REVISION,))
        while True:
            try:
                revision_id = next(graph_iter)
            except StopIteration:
                break
            except errors.RevisionNotPresent as e:
                # Oops, a ghost.
                yield e.revision_id, None, None
                break
            else:
                revno_str = _compute_revno_str(branch, revision_id)
                if not found_start and revision_id == start_rev_id:
                    if not exclude_common_ancestry:
                        yield revision_id, revno_str, 0
                    found_start = True
                    break
                else:
                    yield revision_id, revno_str, 0
        if not found_start:
            raise _StartNotLinearAncestor()


def _graph_view_revisions(branch, start_rev_id, end_rev_id,
                          rebase_initial_depths=True,
                          exclude_common_ancestry=False):
    """Calculate revisions to view including merges, newest to oldest.

    :param branch: the branch
    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param rebase_initial_depth: should depths be rebased until a mainline
        revision is found?
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    """
    if exclude_common_ancestry:
        stop_rule = 'with-merges-without-common-ancestry'
    else:
        stop_rule = 'with-merges'
    view_revisions = branch.iter_merge_sorted_revisions(
        start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
        stop_rule=stop_rule)
    if not rebase_initial_depths:
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            yield rev_id, '.'.join(map(str, revno)), merge_depth
    else:
        # We're following a development line starting at a merged revision.
        # We need to adjust depths down by the initial depth until we find
        # a depth less than it. Then we use that depth as the adjustment.
        # If and when we reach the mainline, depth adjustment ends.
        depth_adjustment = None
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            if depth_adjustment is None:
                depth_adjustment = merge_depth
            if merge_depth < depth_adjustment:
                # From now on we reduce the depth adjustement, this can be
                # surprising for users. The alternative requires two passes
                # which breaks the fast display of the first revision
                depth_adjustment = merge_depth
            merge_depth -= depth_adjustment
            yield rev_id, '.'.join(map(str, revno)), merge_depth
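

# Worked example (depths invented): if a log starts at a merged revision and
# the incoming merge depths are 2, 3, 3, 2, 1, 0, rebasing first subtracts
# the initial depth (2), giving 0, 1, 1, 0; the adjustment then shrinks as
# shallower revisions appear (to 1, then 0), so the remaining revisions also
# come out at depth 0.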


def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision have a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r, n, d in view_revisions])
        if min_depth != 0:
            view_revisions = [(r, n, d - min_depth)
                              for r, n, d in view_revisions]
    return view_revisions


def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                          file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
        Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non empty, only revisions matching one or more of
        the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if isinstance(view_revisions, list):
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            log_rev_iterator = adapter(
                branch, generate_delta, search, log_rev_iterator, file_ids,
                direction)
        else:
            log_rev_iterator = adapter(
                branch, generate_delta, search, log_rev_iterator)
    return log_rev_iterator
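

# Hedged sketch (a hypothetical adapter, not in this module): each entry in
# log_adapters is a callable taking (branch, generate_delta, search,
# log_rev_iterator) and returning a new iterator over the same batched
# ((rev_id, revno, merge_depth), rev, delta) structure, for example:
#
#     def _drop_empty_messages(branch, generate_delta, search, log_rev_iterator):
#         for revs in log_rev_iterator:
#             yield [rev for rev in revs if rev[1] is None or rev[1].message]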


def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param match: A dictionary with properties as keys and lists of strings
        as values. To match, a revision may match any of the supplied strings
        within a single property but must match at least one string for each
        property.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not match:
        return log_rev_iterator
    # Use lazy_compile so mapping to InvalidPattern error occurs.
    searchRE = [(k, [lazy_regex.lazy_compile(x, re.IGNORECASE) for x in v])
                for k, v in match.items()]
    return _filter_re(searchRE, log_rev_iterator)


def _filter_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs


def _match_filter(searchRE, rev):
    strings = {
        'message': (rev.message,),
        'committer': (rev.committer,),
        'author': (rev.get_apparent_authors()),
        'bugs': list(rev.iter_bugs())
        }
    strings[''] = [item for inner_list in strings.values()
                   for item in inner_list]
    for k, v in searchRE:
        if k in strings and not _match_any_filter(strings[k], v):
            return False
    return True


def _match_any_filter(strings, res):
    return any(r.search(s) for r in res for s in strings)
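

# Worked example (values invented): with match = {'author': ['alice'],
# 'message': ['fix', 'typo']}, a revision is kept when its author matches
# 'alice' AND its message matches 'fix' or 'typo'.  Strings within one key
# are alternatives, but every supplied key must be satisfied, as described
# in _make_search_filter's docstring.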


def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    """Add revision deltas to a log iterator if needed.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
        Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :param fileids: If non empty, only revisions matching one or more of
        the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not generate_delta and not fileids:
        return log_rev_iterator
    return _generate_deltas(branch.repository, log_rev_iterator,
                            generate_delta, fileids, direction)


def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
                     direction):
    """Create deltas for each batch of revisions in log_rev_iterator.

    If we're only generating deltas for the sake of filtering against
    file-ids, we stop generating deltas once all file-ids reach the
    appropriate life-cycle point. If we're receiving data newest to
    oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
    """
    check_fileids = fileids is not None and len(fileids) > 0
    if check_fileids:
        fileid_set = set(fileids)
        if direction == 'reverse':
            stop_on = 'add'
        else:
            stop_on = 'remove'
    else:
        fileid_set = None
    for revs in log_rev_iterator:
        # If we were matching against fileids and we've run out,
        # there's nothing left to do
        if check_fileids and not fileid_set:
            return
        revisions = [rev[1] for rev in revs]
        new_revs = []
        if delta_type == 'full' and not check_fileids:
            deltas = repository.get_deltas_for_revisions(revisions)
            for rev, delta in zip(revs, deltas):
                new_revs.append((rev[0], rev[1], delta))
        else:
            deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
            for rev, delta in zip(revs, deltas):
                if check_fileids:
                    if delta is None or not delta.has_changed():
                        continue
                    else:
                        _update_fileids(delta, fileid_set, stop_on)
                        if delta_type is None:
                            delta = None
                        elif delta_type == 'full':
                            # If the file matches all the time, rebuilding
                            # a full delta like this in addition to a partial
                            # one could be slow. However, it's likely that
                            # most revisions won't get this far, making it
                            # faster to filter on the partial deltas and
                            # build the occasional full delta than always
                            # building full deltas and filtering those.
                            rev_id = rev[0][0]
                            delta = repository.get_revision_delta(rev_id)
                new_revs.append((rev[0], rev[1], delta))
        yield new_revs


def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove' - take file-ids out of the
        fileids set once their add or remove entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added:
            if item[1] in fileids:
                fileids.remove(item[1])
    elif stop_on == 'delete':
        for item in delta.removed:
            if item[1] in fileids:
                fileids.remove(item[1])


def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = dict(repository.iter_revisions(revision_ids))
        yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]


def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
    """Group up a single large batch into smaller ones.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    num = 9
    for batch in log_rev_iterator:
        batch = iter(batch)
        while True:
            step = [detail for _, detail in zip(range(num), batch)]
            if len(step) == 0:
                break
            yield step
            num = min(int(num * 1.5), 200)


def _get_revision_limits(branch, start_revision, end_revision):
    """Get and check revision limits.

    :param branch: The branch containing the revisions.

    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :return: (start_rev_id, end_rev_id) tuple.
    """
    start_rev_id = None
    start_revno = None
    if start_revision is not None:
        if not isinstance(start_revision, revisionspec.RevisionInfo):
            raise TypeError(start_revision)
        start_rev_id = start_revision.rev_id
        start_revno = start_revision.revno
    if start_revno is None:
        start_revno = 1

    end_rev_id = None
    end_revno = None
    if end_revision is not None:
        if not isinstance(end_revision, revisionspec.RevisionInfo):
            raise TypeError(end_revision)
        end_rev_id = end_revision.rev_id
        end_revno = end_revision.revno
    if end_revno is None:
        try:
            end_revno = branch.revno()
        except errors.GhostRevisionsHaveNoRevno:
            end_revno = None

    if branch.last_revision() != _mod_revision.NULL_REVISION:
        if (start_rev_id == _mod_revision.NULL_REVISION
                or end_rev_id == _mod_revision.NULL_REVISION):
            raise errors.BzrCommandError(
                gettext('Logging revision 0 is invalid.'))
        if end_revno is not None and start_revno > end_revno:
            raise errors.BzrCommandError(
                gettext("Start revision must be older than the end revision."))
    return (start_rev_id, end_rev_id)


def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.

    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # filtered later.
    # Also map the revisions to rev_ids, to be used in the later filtering
    # stage.
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
            or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
    if start_revno > end_revno:
        raise errors.BzrCommandError(gettext("Start revision must be older "
                                             "than the end revision."))

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    rev_nos = {}
    mainline_revs = []
    graph = branch.repository.get_graph()
    for revision_id in graph.iter_lefthand_ancestry(
            branch_last_revision, (_mod_revision.NULL_REVISION,)):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
            break
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        cur_revno -= 1
    else:
        # We walked off the edge of all revisions, so we add a 'None' marker
        mainline_revs.append(None)

    mainline_revs.reverse()

    # override the mainline to look like the revision history.
    return mainline_revs, rev_nos, start_rev_id, end_rev_id


def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                       include_merges=True):
    r"""Return the list of revision ids which touch a given file id.

    The function filters view_revisions and returns a subset.
    This includes the revisions which directly change the file id,
    and the revisions which merge these changes. So if the
    revision graph is::

    And 'C' changes a file, then both C and D will be returned. F will not be
    returned even though it brings the changes to C into the branch starting
    with E. (Note that if we were using F as the tip instead of G, then we

    This will also be restricted based on a subset of the mainline.

    :param branch: The branch where we can get text revision information.

    :param file_id: Filter out revisions that do not touch file_id.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples. This is the list of revisions which will be filtered. It is
        assumed that view_revisions is in merge_sort order (i.e. newest
        revision first).

    :param include_merges: include merge revisions in the result or not

    :return: A list of (revision_id, dotted_revno, merge_depth) tuples.
    """
    # Lookup all possible text keys to determine which ones actually modified
    # the file.
    graph = branch.repository.get_file_graph()
    get_parent_map = graph.get_parent_map
    text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
    next_keys = None
    # Looking up keys in batches of 1000 can cut the time in half, as well as
    # memory consumption. GraphIndex *does* like to look for a few keys in
    # parallel, it just doesn't like looking for *lots* of keys in parallel.
    # TODO: This code needs to be re-evaluated periodically as we tune the
    # indexing layer. We might consider passing in hints as to the known
    # access pattern (sparse/clustered, high success rate/low success
    # rate). This particular access is clustered with a low success rate.
    modified_text_revisions = set()
    chunk_size = 1000
    for start in range(0, len(text_keys), chunk_size):
        next_keys = text_keys[start:start + chunk_size]
        # Only keep the revision_id portion of the key
        modified_text_revisions.update(
            [k[1] for k in get_parent_map(next_keys)])
    del text_keys, next_keys

    result = []
    # Track what revisions will merge the current revision, replace entries
    # with 'None' when they have been added to result
    current_merge_stack = [None]
    for info in view_revisions:
        rev_id, revno, depth = info
        if depth == len(current_merge_stack):
            current_merge_stack.append(info)
        else:
            del current_merge_stack[depth + 1:]
            current_merge_stack[-1] = info

        if rev_id in modified_text_revisions:
            # This needs to be logged, along with the extra revisions
            for idx in range(len(current_merge_stack)):
                node = current_merge_stack[idx]
                if node is not None:
                    if include_merges or node[2] == 0:
                        result.append(node)
                        current_merge_stack[idx] = None
    return result


def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth. There may be no topological justification for this
    but it looks much nicer.
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
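

# Worked example (revision ids invented): reverse_by_depth() reverses each
# depth level locally, so a newest-to-oldest input such as
#     [('rev3', '3', 0), ('rev2', '2', 0), ('rev2.1.1', '2.1.1', 1), ('rev1', '1', 0)]
# comes back oldest-to-newest with each merged revision still listed after
# the mainline revision that merged it:
#     [('rev1', '1', 0), ('rev2', '2', 0), ('rev2.1.1', '2.1.1', 1), ('rev3', '3', 0)]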


class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None, signature=None):
        self.rev = rev
        if revno is None:
            self.revno = None
        else:
            self.revno = str(revno)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff
        self.signature = signature


class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated.  The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions.  If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    - supports_signatures must be True if this log formatter supports GPG
      signatures.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description::

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """
    preferred_levels = 0

    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
            non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use
        :param delta_format: the level of delta information to display
            or None to leave it to the formatter to decide
        :param levels: the number of levels to display; None or -1 to
            let the log formatter decide.
        :param show_advice: whether to show advice at the end of the
            log or not
        :param author_list_handler: callable generating a list of
            authors to display for a given revision
        """
        self.to_file = to_file
        # 'exact' stream used to show diff, it should print content 'as is'
        # and should not try to decode/encode it to unicode to avoid bug
        if to_exact_file is not None:
            self.to_exact_file = to_exact_file
        else:
            # XXX: somewhat hacky; this assumes it's a codec writer; it's
            # better for code that expects to get diffs to pass in the exact
            # file stream
            self.to_exact_file = getattr(to_file, 'stream', to_file)
        self.show_ids = show_ids
        self.show_timezone = show_timezone
        if delta_format is None:
            # Ensures backward compatibility
            delta_format = 2  # long format
        self.delta_format = delta_format
        self.levels = levels
        self._show_advice = show_advice
        self._merge_count = 0
        self._author_list_handler = author_list_handler

    def get_levels(self):
        """Get the number of levels to display or 0 for all."""
        if getattr(self, 'supports_merge_revisions', False):
            if self.levels is None or self.levels == -1:
                self.levels = self.preferred_levels
        else:
            self.levels = 1
        return self.levels

    def log_revision(self, revision):
        """Log a revision.

        :param revision: The LogRevision to be logged.
        """
        raise NotImplementedError('not implemented in abstract base')

    def show_advice(self):
        """Output user advice, if any, when the log is completed."""
        if self._show_advice and self.levels == 1 and self._merge_count > 0:
            advice_sep = self.get_advice_separator()
            if advice_sep:
                self.to_file.write(advice_sep)
            self.to_file.write(
                "Use --include-merged or -n0 to see merged revisions.\n")

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return ''

    def short_committer(self, rev):
        name, address = config.parse_username(rev.committer)
        if name:
            return name
        return address

    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of a author list handler,
        as provided by the author list registry, using the ``who``
        argument.  That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)

        names = author_list_handler(rev)
        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if name:
                    names[i] = name
                else:
                    names[i] = address
        if sep is not None:
            names = sep.join(names)
        return names

    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1
            return ' [merge]'
        else:
            return ''

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))

    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            try:
                lines.extend(self._format_properties(handler(revision)))
            except Exception:
                trace.log_exception_quietly()
                trace.print_exception(sys.exc_info(), self.to_file)
        return lines

    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return self._format_properties(
                rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

        # Imported foreign revision revision ids always contain :
        if b":" not in rev.revision_id:
            return []

        # Revision was once imported from a foreign repository
        try:
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:
            return []

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))

    def _format_properties(self, properties):
        lines = []
        for key, value in properties.items():
            lines.append(key + ': ' + value)
        return lines

    def show_diff(self, to_file, diff, indent):
        encoding = get_terminal_encoding()
        for l in diff.rstrip().split(b'\n'):
            to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
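

# Hedged sketch (hypothetical plugin code, not part of this module): a
# custom revision-property handler is registered in
# properties_handler_registry and, as used by custom_properties() above, is
# called with the revision object; it returns a {'name': 'value'} dict of
# extra lines to show.  The property name 'ticket' is invented for the
# example.
#
#     def show_ticket_property(rev):
#         props = rev.properties
#         if 'ticket' in props:
#             return {'ticket': props['ticket']}
#         return {}
#
#     properties_handler_registry.register('ticket', show_ticket_property)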


# Separator between revisions in long format
_LONG_SEP = '-' * 60


class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True
    supports_signatures = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
        else:
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone)

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
                                                             rev.timezone or 0)

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = '    ' * revision.merge_depth
        lines = [_LONG_SEP]
        if revision.revno is not None:
            lines.append('revno: %s%s' % (revision.revno,
                                          self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' %
                         (revision.rev.revision_id.decode('utf-8'),))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id.decode('utf-8'),))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            lines.append('branch nick: %s' % (branch_nick,))

        lines.append('timestamp: %s' % (self.date_string(revision.rev),))

        if revision.signature is not None:
            lines.append('signature: ' + revision.signature)

        lines.append('message:')
        if not revision.rev.message:
            lines.append('  (no message)')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                lines.append('  %s' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                         show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            to_file.flush()
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'


class ShortLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = '    ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno is None or revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file
        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(sorted(revision.tags)))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                                                     revision.revno or "", self.short_author(
                                                         revision.rev),
                                                     format_date(revision.rev.timestamp,
                                                                 revision.rev.timezone or 0,
                                                                 self.show_timezone, date_fmt="%Y-%m-%d",
                                                                 show_offset=False),
                                                     tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent + offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id.decode('utf-8'),))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format == 1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, '      ')
        to_file.write('\n')


class LineLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len - 3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
                                           self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers counts from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            out.append(self.truncate(
                self.short_author(rev), (max_chars + 3) // 4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(sorted(tags)))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
1776


class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path, = c[:1]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
            to_file.write('\n')


def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)
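# Illustrative sketch (not part of the module API): line_log() produces a
# one-line summary for a bare Revision object, without needing a branch or a
# revision number.  ``rev`` below is an assumed Revision object.
#
#     text = line_log(rev, 80)
#     # e.g. "Jane Doe 2011-04-01 Fix the frobnicator"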


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct. 'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))


log_formatter_registry = LogFormatterRegistry()


log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)
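# Illustrative sketch (not part of the module): a plugin can provide a new
# output style by subclassing LogFormatter and registering it under a name.
# ``MyPluginLogFormatter`` and the 'my-style' name are hypothetical.
#
#     class MyPluginLogFormatter(LogFormatter):
#         supports_merge_revisions = True
#
#         def log_revision(self, revision):
#             self.to_file.write('%s\n' % revision.rev.get_summary())
#
#     register_formatter('my-style', MyPluginLogFormatter)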


def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(
            gettext("unknown log formatter: %r") % name)
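# Illustrative sketch: constructing a formatter by name and using it to
# render a branch's history.  ``branch`` is an assumed, already-opened
# Branch object.
#
#     lf = log_formatter('short', to_file=sys.stdout,
#                        show_timezone='original')
#     show_log(branch, lf, verbose=False, direction='reverse')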


def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')

author_list_registry.register('first', author_list_first,
                              'The first author')

author_list_registry.register('committer', author_list_committer,
                              'The committer')
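# Illustrative sketch: each registered handler maps a Revision to a list of
# name strings, e.g. GnuChangelogLogFormatter asks for 'first' via
# self.authors(revision.rev, 'first', sep=', ').  ``rev`` below is an
# assumed Revision object.
#
#     handler = author_list_registry.get('first')
#     names = handler(rev)  # e.g. ['Jane Doe <jane@example.com>']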


def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history comparing the old revision history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
                                                            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in range(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
                or len(old_rh) <= i
                or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    # TODO: It might be nice to do something like show_log
    # and show the merged entries. But since this is the
    # removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*' * 60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i + 1, 0, None)
            lf.log_revision(lr)
        to_file.write('*' * 60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx + 1,
                 end_revision=len(new_rh))
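# Illustrative sketch: comparing the mainline before and after an operation
# such as a pull.  ``old_rh`` and ``new_rh`` are assumed lists of revision
# ids, oldest first, captured before and after the operation.
#
#     show_changed_revisions(branch, old_rh, new_rh, log_format='short')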


def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    return old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    while do_new or do_old:
        if do_new:
            try:
                new_revision = next(new_iter)
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = next(old_iter)
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
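# Worked example: if the old tip's lefthand ancestry is A-B-C and the new
# tip's is A-B-D-E, the walk stops at the common ancestor B and the result
# is old_history == [C], new_history == [D, E], both oldest-first.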


def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*' * 60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*' * 60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, verbose=False, direction='forward',
                 start_revision=start_revno)


def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list, exit_stack):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
        the first of these can be a branch location or a file path,
        the remainder must be file paths
    :param exit_stack: When the branch returned is read locked,
        an unlock call will be queued to the exit stack.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
        info_list is a list of (relative_path, file_id, kind) tuples where
        kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
        branch will be read-locked.
    """
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    exit_stack.enter_context(b.lock_read())
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
                                                       "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info


def _get_kind_for_file_id(tree, path, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(path)
    else:
        return None


properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available


def _bugs_properties_handler(revision):
    fixed_bug_urls = []
    related_bug_urls = []
    for bug_url, status in revision.iter_bugs():
        if status == 'fixed':
            fixed_bug_urls.append(bug_url)
        elif status == 'related':
            related_bug_urls.append(bug_url)
    ret = {}
    if fixed_bug_urls:
        text = ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))
        ret[text] = ' '.join(fixed_bug_urls)
    if related_bug_urls:
        text = ngettext('related bug', 'related bugs',
                        len(related_bug_urls))
        ret[text] = ' '.join(related_bug_urls)
    return ret


properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
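# Illustrative sketch: plugins may register additional handlers. Each handler
# takes a Revision and returns a dict of extra "name: value" entries for the
# formatters' show_properties() output.  The 'ticket' revision property below
# is a made-up example.
#
#     def _ticket_properties_handler(revision):
#         ticket = revision.properties.get('ticket')
#         if ticket is None:
#             return {}
#         return {'ticket': ticket}
#
#     properties_handler_registry.register('ticket_properties_handler',
#                                          _ticket_properties_handler)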


# adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is this won't be considered a stable api.
log_adapters = [
    # core log logic
    _make_batch_filter,
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show