1
# Copyright (C) 2005-2011 Canonical Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
"""Code to show logs of changes.
19
Various flavors of log can be produced:
21
* for one file, or the whole tree, and (not done yet) for
22
files in a given directory
24
* in "verbose" mode with a description of what changed from one
27
* with file-ids and revision-ids shown
29
Logs are actually written out through an abstract LogFormatter
30
interface, which allows for different preferred formats. Plugins can
33
Logs can be produced in either forward (oldest->newest) or reverse
34
(newest->oldest) order.
36
Logs can be filtered to show only revisions matching a particular
37
search string, or within a particular range of revisions. The range
38
can be given as date/times, which are reduced to revisions before
41
In verbose mode we show a summary of what changed in each particular
42
revision. Note that this is the delta for changes in that revision
43
relative to its left-most parent, not the delta relative to the last
44
logged revision. So for example if you ask for a verbose log of
45
changes touching hello.c you will get a list of those revisions also
46
listing other things that were changed in the same revision, but not
47
all the changes since the previous revision that touched hello.c.
50
from __future__ import absolute_import
56
from warnings import (
60
from .lazy_import import lazy_import
61
lazy_import(globals(), """
68
repository as _mod_repository,
69
revision as _mod_revision,
72
from breezy.i18n import gettext, ngettext
81
from .osutils import (
83
format_date_with_offset_in_original_timezone,
84
get_diff_header_encoding,
85
get_terminal_encoding,
93
from .tree import find_previous_path
96
def find_touching_revisions(repository, last_revision, last_tree, last_path):
    """Yield a description of revisions which affect the file.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    :param repository: the repository to search
    :param last_revision: the newest revision to start the backward walk from
    :param last_tree: the revision tree of last_revision
    :param last_path: the path of the file in last_tree

    TODO: Perhaps some way to limit this to only particular revisions,
    or to traverse a non-mainline set of revisions?
    """
    last_verifier = last_tree.get_file_verifier(last_path)
    graph = repository.get_graph()
    # Walk the left-hand (mainline) ancestry, newest to oldest.
    history = list(graph.iter_lefthand_ancestry(last_revision, []))
    revno = len(history)
    for revision_id in history:
        this_tree = repository.revision_tree(revision_id)
        this_path = find_previous_path(last_tree, this_tree, last_path)

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?
        if this_path is not None and last_path is None:
            # Present in this (older) revision but gone in the newer one:
            # it was deleted in the newer revision.
            yield revno, revision_id, "deleted " + this_path
            this_verifier = this_tree.get_file_verifier(this_path)
        elif this_path is None and last_path is not None:
            # Absent here but present in the newer revision: added there.
            yield revno, revision_id, "added " + last_path
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
            this_verifier = this_tree.get_file_verifier(this_path)
        else:
            this_verifier = this_tree.get_file_verifier(this_path)
            if (this_verifier != last_verifier):
                yield revno, revision_id, "modified " + this_path

        last_verifier = this_verifier
        last_path = this_path
        last_tree = this_tree
        # Once the file no longer exists in older history, stop walking.
        if last_path is None:
            return
        revno -= 1
140
def show_log(branch, lf,
             specific_fileid=None,
             verbose=False,
             direction='reverse',
             start_revision=None,
             end_revision=None,
             search=None,
             limit=None,
             show_diff=False,
             match=None):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.

    :param specific_fileid: If not None, list only the commits affecting the
        specified file, rather than all commits.

    :param verbose: If True show added/changed/deleted/renamed files.

    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.

    :param start_revision: If not None, only show revisions >= start_revision

    :param end_revision: If not None, only show revisions <= end_revision

    :param search: If not None, only show revisions with matching commit
        messages

    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.

    :param show_diff: If True, output a diff after each revision.

    :param match: Dictionary of search lists to use when matching revision
        properties.
    """
    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
    else:
        file_ids = None
    if verbose:
        if file_ids:
            delta_type = 'partial'
        else:
            delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        if file_ids:
            diff_type = 'partial'
        else:
            diff_type = 'full'
    else:
        diff_type = None

    # Build the request and execute it
    # NOTE(review): ``match`` is accepted but not forwarded here — confirm
    # against upstream history before relying on it through this wrapper.
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
209
# Note: This needs to be kept in sync with the defaults in
210
# make_log_request_dict() below
211
_DEFAULT_REQUEST_PARAMS = {
212
'direction': 'reverse',
214
'generate_tags': True,
215
'exclude_common_ancestry': False,
216
'_match_using_deltas': True,
220
def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False, omit_merges=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
        'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
        affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
        revisions >= start_revision

    :param end_revision: If not None, only generate
        revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions, all revisions
        are shown if None or 0.

    :param message_search: If not None, only include revisions with
        matching commit messages

    :param levels: the number of levels of revisions to
        generate; 1 for just the mainline; 0 for all levels, or None for
        a sensible default.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
        'full' means generate the complete delta - adds/deletes/modifies/etc;
        'partial' means filter the delta using specific_fileids;
        None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
        'full' means generate the complete diff - adds/deletes/modifies/etc;
        'partial' means filter the diff using specific_fileids;
        None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
        algorithm used for matching specific_fileids. This parameter
        may be removed in the future so breezy client code should NOT
        use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
        range operator or as a graph difference.

    :param signature: show digital signature information

    :param match: Dictionary of list of search strings to use when filtering
        revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
        the empty string to match any of the preceding properties.

    :param omit_merges: If True, commits with more than one parent are
        omitted.
    """
    # Take care of old style message_search parameter by folding it into
    # the newer 'match' dictionary.
    if message_search:
        if match:
            if 'message' in match:
                match['message'].append(message_search)
            else:
                match['message'] = [message_search]
        else:
            match = {'message': [message_search]}
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'match': match,
        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
    }
314
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary.

    :param rqst: a (possibly None or partial) log request dictionary
    :return: a new dictionary: _DEFAULT_REQUEST_PARAMS with any
        caller-supplied entries layered on top
    """
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result
322
def format_signature_validity(rev_id, branch):
    """get the signature validity

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log
    """
    from breezy import gpg

    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
    # Any other status falls through and returns None implicitly.
343
class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)
354
class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Create a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
          See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)

    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)

        self.branch.lock_read()
        try:
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()
        finally:
            self.branch.unlock()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
            # user didn't specify levels, use whatever the LF can handle:
            rqst['levels'] = lf.get_levels()

        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
        if not getattr(lf, 'supports_signatures', False):
            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        try:
            for lr in generator.iter_log_revisions():
                lf.log_revision(lr)
        except errors.GhostRevisionUnusableHere:
            raise errors.BzrCommandError(
                gettext('Further revision history missing.'))
        lf.show_advice()

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
424
class _StartNotLinearAncestor(Exception):
425
"""Raised when a start revision is not found walking left-hand history."""
428
class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        show_signature = rqst.get('signature')
        omit_merges = rqst.get('omit_merges')
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if levels != 0 and merge_depth >= levels:
                    continue
                if omit_merges and len(rev.parent_ids) > 1:
                    continue
                if rev is None:
                    raise errors.GhostRevisionUnusableHere(rev_id)
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                if show_signature:
                    signature = format_signature_validity(rev_id, self.branch)
                else:
                    signature = None
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff, signature)
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return

    def _format_diff(self, rev, rev_id, diff_type):
        """Render the diff for one revision against its left-hand parent.

        :param rev: the Revision object
        :param rev_id: its revision id
        :param diff_type: 'full' or 'partial'
        :return: the diff as bytes
        """
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = BytesIO()
        path_encoding = get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
            new_label='', path_encoding=path_encoding)
        return s.getvalue()

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise BzrError("illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
            rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('match'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('match'))
553
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
                         ):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
        a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(gettext(
            '--exclude-common-ancestry requires two different revisions'))
    if direction not in ('reverse', 'forward'):
        raise ValueError(gettext('invalid direction %r') % direction)
    br_revno, br_rev_id = branch.last_revision_info()
    if br_revno == 0:
        # An empty branch has nothing to show.
        return []

    if (end_rev_id and start_rev_id == end_rev_id
        and (not generate_merge_revisions
             or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        return _generate_one_revision(branch, end_rev_id, br_rev_id,
                                      br_revno)
    if not generate_merge_revisions:
        try:
            # If we only want to see linear revisions, we can iterate ...
            iter_revs = _linear_view_revisions(
                branch, start_rev_id, end_rev_id,
                exclude_common_ancestry=exclude_common_ancestry)
            # If a start limit was given and it's not obviously an
            # ancestor of the end limit, check it before outputting anything
            if (direction == 'forward'
                or (start_rev_id and not _is_obvious_ancestor(
                        branch, start_rev_id, end_rev_id))):
                iter_revs = list(iter_revs)
            if direction == 'forward':
                iter_revs = reversed(iter_revs)
            return iter_revs
        except _StartNotLinearAncestor:
            # Switch to the slower implementation that may be able to find a
            # non-obvious ancestor out of the left-hand history.
            pass
    iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                        direction, delayed_graph_generation,
                                        exclude_common_ancestry)
    if direction == 'forward':
        iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
    return iter_revs
605
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
606
if rev_id == br_rev_id:
608
return [(br_rev_id, br_revno, 0)]
610
revno_str = _compute_revno_str(branch, rev_id)
611
return [(rev_id, revno_str, 0)]
614
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    # The above is only true for old formats (<= 0.92), for newer formats, a
    # couple of seconds only should be needed to load the whole graph and the
    # other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept to show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                        and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    # adjust end_rev_id to continue from this merge point.
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError(gettext('Start revision not found in'
                ' history of end revision.'))

    # We exit the loop above because we encounter a revision with merges, from
    # this revision, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = itertools.chain(iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
                              rebase_initial_depths=(direction == 'reverse'),
                              exclude_common_ancestry=exclude_common_ancestry))
    return view_revisions
673
def _has_merges(branch, rev_id):
674
"""Does a revision have multiple parents or not?"""
675
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
676
return len(parents) > 1
679
def _compute_revno_str(branch, rev_id):
680
"""Compute the revno string from a rev_id.
682
:return: The revno string, or None if the revision is not in the supplied
686
revno = branch.revision_id_to_dotted_revno(rev_id)
687
except errors.NoSuchRevision:
688
# The revision must be outside of this branch
691
return '.'.join(str(n) for n in revno)
694
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
695
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
696
if start_rev_id and end_rev_id:
698
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
699
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
700
except errors.NoSuchRevision:
701
# one or both is not in the branch; not obvious
703
if len(start_dotted) == 1 and len(end_dotted) == 1:
705
return start_dotted[0] <= end_dotted[0]
706
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
707
start_dotted[0:1] == end_dotted[0:1]):
708
# both on same development line
709
return start_dotted[2] <= end_dotted[2]
713
# if either start or end is not specified then we use either the first or
714
# the last revision and *they* are obvious ancestors.
718
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
                           exclude_common_ancestry=False):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param exclude_common_ancestry: Whether the start_rev_id should be part of
        the iterated revisions.
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
        dotted_revno will be None for ghosts
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
        is not found walking the left-hand history
    """
    br_revno, br_rev_id = branch.last_revision_info()
    repo = branch.repository
    graph = repo.get_graph()
    if start_rev_id is None and end_rev_id is None:
        # Unbounded: walk from the tip, counting revnos down from the tip's.
        cur_revno = br_revno
        graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
            (_mod_revision.NULL_REVISION,))
        while True:
            try:
                revision_id = next(graph_iter)
            except errors.RevisionNotPresent as e:
                # A ghost in the left-hand history: report it and stop.
                yield e.revision_id, None, None
                break
            except StopIteration:
                break
            else:
                yield revision_id, str(cur_revno), 0
                cur_revno -= 1
    else:
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
            (_mod_revision.NULL_REVISION,))
        while True:
            try:
                revision_id = next(graph_iter)
            except StopIteration:
                break
            except errors.RevisionNotPresent as e:
                # A ghost in the left-hand history: report it and stop.
                revision_id = e.revision_id
                yield e.revision_id, None, None
                break
            else:
                revno_str = _compute_revno_str(branch, revision_id)
                if not found_start and revision_id == start_rev_id:
                    if not exclude_common_ancestry:
                        yield revision_id, revno_str, 0
                    found_start = True
                    break
                else:
                    yield revision_id, revno_str, 0
        if not found_start:
            raise _StartNotLinearAncestor()
776
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
777
rebase_initial_depths=True,
778
exclude_common_ancestry=False):
779
"""Calculate revisions to view including merges, newest to oldest.
781
:param branch: the branch
782
:param start_rev_id: the lower revision-id
783
:param end_rev_id: the upper revision-id
784
:param rebase_initial_depth: should depths be rebased until a mainline
786
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
788
if exclude_common_ancestry:
789
stop_rule = 'with-merges-without-common-ancestry'
791
stop_rule = 'with-merges'
792
view_revisions = branch.iter_merge_sorted_revisions(
793
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
795
if not rebase_initial_depths:
796
for (rev_id, merge_depth, revno, end_of_merge
798
yield rev_id, '.'.join(map(str, revno)), merge_depth
800
# We're following a development line starting at a merged revision.
801
# We need to adjust depths down by the initial depth until we find
802
# a depth less than it. Then we use that depth as the adjustment.
803
# If and when we reach the mainline, depth adjustment ends.
804
depth_adjustment = None
805
for (rev_id, merge_depth, revno, end_of_merge
807
if depth_adjustment is None:
808
depth_adjustment = merge_depth
810
if merge_depth < depth_adjustment:
811
# From now on we reduce the depth adjustement, this can be
812
# surprising for users. The alternative requires two passes
813
# which breaks the fast display of the first revision
815
depth_adjustment = merge_depth
816
merge_depth -= depth_adjustment
817
yield rev_id, '.'.join(map(str, revno)), merge_depth
820
def _rebase_merge_depth(view_revisions):
821
"""Adjust depths upwards so the top level is 0."""
822
# If either the first or last revision have a merge_depth of 0, we're done
823
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
824
min_depth = min([d for r, n, d in view_revisions])
826
view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
827
return view_revisions
830
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                          file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
      Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non empty, only revisions matching one or more of
      the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if isinstance(view_revisions, list):
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator, file_ids, direction)
        else:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator)
    return log_rev_iterator
869
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
870
"""Create a filtered iterator of log_rev_iterator matching on a regex.
872
:param branch: The branch being logged.
873
:param generate_delta: Whether to generate a delta for each revision.
874
:param match: A dictionary with properties as keys and lists of strings
875
as values. To match, a revision may match any of the supplied strings
876
within a single property but must match at least one string for each
878
:param log_rev_iterator: An input iterator containing all revisions that
879
could be displayed, in lists.
880
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
884
return log_rev_iterator
885
searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
886
for k, v in match.items()]
887
return _filter_re(searchRE, log_rev_iterator)
890
def _filter_re(searchRE, log_rev_iterator):
    """Yield only the revisions in each batch that satisfy searchRE.

    Batches that end up empty are dropped entirely.
    """
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs
896
def _match_filter(searchRE, rev):
    """Return True if *rev* satisfies every property filter in searchRE.

    :param searchRE: list of (property_name, [compiled patterns]) pairs
    :param rev: a Revision object providing message/committer/authors/bugs
    """
    strings = {
        'message': (rev.message,),
        'committer': (rev.committer,),
        'author': (rev.get_apparent_authors()),
        'bugs': list(rev.iter_bugs())
    }
    # The empty-string key matches against any of the above properties.
    strings[''] = [item for inner_list in strings.values()
                   for item in inner_list]
    for (k, v) in searchRE:
        if k in strings and not _match_any_filter(strings[k], v):
            return False
    return True
910
def _match_any_filter(strings, res):
911
return any(re.search(s) for re in res for s in strings)
913
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
914
fileids=None, direction='reverse'):
915
"""Add revision deltas to a log iterator if needed.
917
:param branch: The branch being logged.
918
:param generate_delta: Whether to generate a delta for each revision.
919
Permitted values are None, 'full' and 'partial'.
920
:param search: A user text search string.
921
:param log_rev_iterator: An input iterator containing all revisions that
922
could be displayed, in lists.
923
:param fileids: If non empty, only revisions matching one or more of
924
the file-ids are to be kept.
925
:param direction: the direction in which view_revisions is sorted
926
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
929
if not generate_delta and not fileids:
930
return log_rev_iterator
931
return _generate_deltas(branch.repository, log_rev_iterator,
932
generate_delta, fileids, direction)
935
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
                     direction):
    """Create deltas for each batch of revisions in log_rev_iterator.

    If we're only generating deltas for the sake of filtering against
    file-ids, we stop generating deltas once all file-ids reach the
    appropriate life-cycle point. If we're receiving data newest to
    oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
    """
    check_fileids = fileids is not None and len(fileids) > 0
    if check_fileids:
        fileid_set = set(fileids)
        if direction == 'reverse':
            stop_on = 'add'
        else:
            stop_on = 'remove'
    else:
        fileid_set = None
    for revs in log_rev_iterator:
        # If we were matching against fileids and we've run out,
        # there's nothing left to do
        if check_fileids and not fileid_set:
            return
        revisions = [rev[1] for rev in revs]
        new_revs = []
        if delta_type == 'full' and not check_fileids:
            deltas = repository.get_deltas_for_revisions(revisions)
            for rev, delta in zip(revs, deltas):
                new_revs.append((rev[0], rev[1], delta))
        else:
            deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
            for rev, delta in zip(revs, deltas):
                if check_fileids:
                    if delta is None or not delta.has_changed():
                        continue
                    else:
                        _update_fileids(delta, fileid_set, stop_on)
                        if delta_type is None:
                            delta = None
                        elif delta_type == 'full':
                            # If the file matches all the time, rebuilding
                            # a full delta like this in addition to a partial
                            # one could be slow. However, it's likely that
                            # most revisions won't get this far, making it
                            # faster to filter on the partial deltas and
                            # build the occasional full delta than always
                            # building full deltas and filtering those.
                            rev_id = rev[0][0]
                            delta = repository.get_revision_delta(rev_id)
                new_revs.append((rev[0], rev[1], delta))
        yield new_revs
988
def _update_fileids(delta, fileids, stop_on):
989
"""Update the set of file-ids to search based on file lifecycle events.
991
:param fileids: a set of fileids to update
992
:param stop_on: either 'add' or 'remove' - take file-ids out of the
993
fileids set once their add or remove entry is detected respectively
996
for item in delta.added:
997
if item[1] in fileids:
998
fileids.remove(item[1])
999
elif stop_on == 'delete':
1000
for item in delta.removed:
1001
if item[1] in fileids:
1002
fileids.remove(item[1])
1005
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
1006
"""Extract revision objects from the repository
1008
:param branch: The branch being logged.
1009
:param generate_delta: Whether to generate a delta for each revision.
1010
:param search: A user text search string.
1011
:param log_rev_iterator: An input iterator containing all revisions that
1012
could be displayed, in lists.
1013
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1016
repository = branch.repository
1017
for revs in log_rev_iterator:
1018
# r = revision_id, n = revno, d = merge depth
1019
revision_ids = [view[0] for view, _, _ in revs]
1020
revisions = dict(repository.iter_revisions(revision_ids))
1021
yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
1024
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
1025
"""Group up a single large batch into smaller ones.
1027
:param branch: The branch being logged.
1028
:param generate_delta: Whether to generate a delta for each revision.
1029
:param search: A user text search string.
1030
:param log_rev_iterator: An input iterator containing all revisions that
1031
could be displayed, in lists.
1032
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1036
for batch in log_rev_iterator:
1039
step = [detail for _, detail in zip(range(num), batch)]
1043
num = min(int(num * 1.5), 200)
1046
def _get_revision_limits(branch, start_revision, end_revision):
1047
"""Get and check revision limits.
1049
:param branch: The branch containing the revisions.
1051
:param start_revision: The first revision to be logged.
1052
For backwards compatibility this may be a mainline integer revno,
1053
but for merge revision support a RevisionInfo is expected.
1055
:param end_revision: The last revision to be logged.
1056
For backwards compatibility this may be a mainline integer revno,
1057
but for merge revision support a RevisionInfo is expected.
1059
:return: (start_rev_id, end_rev_id) tuple.
1061
branch_revno, branch_rev_id = branch.last_revision_info()
1063
if start_revision is None:
1066
if isinstance(start_revision, revisionspec.RevisionInfo):
1067
start_rev_id = start_revision.rev_id
1068
start_revno = start_revision.revno or 1
1070
branch.check_real_revno(start_revision)
1071
start_revno = start_revision
1072
start_rev_id = branch.get_rev_id(start_revno)
1075
if end_revision is None:
1076
end_revno = branch_revno
1078
if isinstance(end_revision, revisionspec.RevisionInfo):
1079
end_rev_id = end_revision.rev_id
1080
end_revno = end_revision.revno or branch_revno
1082
branch.check_real_revno(end_revision)
1083
end_revno = end_revision
1084
end_rev_id = branch.get_rev_id(end_revno)
1086
if branch_revno != 0:
1087
if (start_rev_id == _mod_revision.NULL_REVISION
1088
or end_rev_id == _mod_revision.NULL_REVISION):
1089
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1090
if start_revno > end_revno:
1091
raise errors.BzrCommandError(gettext("Start revision must be "
1092
"older than the end revision."))
1093
return (start_rev_id, end_rev_id)
1096
def _get_mainline_revs(branch, start_revision, end_revision):
1097
"""Get the mainline revisions from the branch.
1099
Generates the list of mainline revisions for the branch.
1101
:param branch: The branch containing the revisions.
1103
:param start_revision: The first revision to be logged.
1104
For backwards compatibility this may be a mainline integer revno,
1105
but for merge revision support a RevisionInfo is expected.
1107
:param end_revision: The last revision to be logged.
1108
For backwards compatibility this may be a mainline integer revno,
1109
but for merge revision support a RevisionInfo is expected.
1111
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
1113
branch_revno, branch_last_revision = branch.last_revision_info()
1114
if branch_revno == 0:
1115
return None, None, None, None
1117
# For mainline generation, map start_revision and end_revision to
1118
# mainline revnos. If the revision is not on the mainline choose the
1119
# appropriate extreme of the mainline instead - the extra will be
1121
# Also map the revisions to rev_ids, to be used in the later filtering
1124
if start_revision is None:
1127
if isinstance(start_revision, revisionspec.RevisionInfo):
1128
start_rev_id = start_revision.rev_id
1129
start_revno = start_revision.revno or 1
1131
branch.check_real_revno(start_revision)
1132
start_revno = start_revision
1135
if end_revision is None:
1136
end_revno = branch_revno
1138
if isinstance(end_revision, revisionspec.RevisionInfo):
1139
end_rev_id = end_revision.rev_id
1140
end_revno = end_revision.revno or branch_revno
1142
branch.check_real_revno(end_revision)
1143
end_revno = end_revision
1145
if ((start_rev_id == _mod_revision.NULL_REVISION)
1146
or (end_rev_id == _mod_revision.NULL_REVISION)):
1147
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1148
if start_revno > end_revno:
1149
raise errors.BzrCommandError(gettext("Start revision must be older "
1150
"than the end revision."))
1152
if end_revno < start_revno:
1153
return None, None, None, None
1154
cur_revno = branch_revno
1157
graph = branch.repository.get_graph()
1158
for revision_id in graph.iter_lefthand_ancestry(
1159
branch_last_revision, (_mod_revision.NULL_REVISION,)):
1160
if cur_revno < start_revno:
1161
# We have gone far enough, but we always add 1 more revision
1162
rev_nos[revision_id] = cur_revno
1163
mainline_revs.append(revision_id)
1165
if cur_revno <= end_revno:
1166
rev_nos[revision_id] = cur_revno
1167
mainline_revs.append(revision_id)
1170
# We walked off the edge of all revisions, so we add a 'None' marker
1171
mainline_revs.append(None)
1173
mainline_revs.reverse()
1175
# override the mainline to look like the revision history.
1176
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1179
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1180
include_merges=True):
1181
r"""Return the list of revision ids which touch a given file id.
1183
The function filters view_revisions and returns a subset.
1184
This includes the revisions which directly change the file id,
1185
and the revisions which merge these changes. So if the
1198
And 'C' changes a file, then both C and D will be returned. F will not be
1199
returned even though it brings the changes to C into the branch starting
1200
with E. (Note that if we were using F as the tip instead of G, then we
1203
This will also be restricted based on a subset of the mainline.
1205
:param branch: The branch where we can get text revision information.
1207
:param file_id: Filter out revisions that do not touch file_id.
1209
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1210
tuples. This is the list of revisions which will be filtered. It is
1211
assumed that view_revisions is in merge_sort order (i.e. newest
1214
:param include_merges: include merge revisions in the result or not
1216
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1218
# Lookup all possible text keys to determine which ones actually modified
1220
graph = branch.repository.get_file_graph()
1221
get_parent_map = graph.get_parent_map
1222
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1224
# Looking up keys in batches of 1000 can cut the time in half, as well as
1225
# memory consumption. GraphIndex *does* like to look for a few keys in
1226
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1227
# TODO: This code needs to be re-evaluated periodically as we tune the
1228
# indexing layer. We might consider passing in hints as to the known
1229
# access pattern (sparse/clustered, high success rate/low success
1230
# rate). This particular access is clustered with a low success rate.
1231
modified_text_revisions = set()
1233
for start in range(0, len(text_keys), chunk_size):
1234
next_keys = text_keys[start:start + chunk_size]
1235
# Only keep the revision_id portion of the key
1236
modified_text_revisions.update(
1237
[k[1] for k in get_parent_map(next_keys)])
1238
del text_keys, next_keys
1241
# Track what revisions will merge the current revision, replace entries
1242
# with 'None' when they have been added to result
1243
current_merge_stack = [None]
1244
for info in view_revisions:
1245
rev_id, revno, depth = info
1246
if depth == len(current_merge_stack):
1247
current_merge_stack.append(info)
1249
del current_merge_stack[depth + 1:]
1250
current_merge_stack[-1] = info
1252
if rev_id in modified_text_revisions:
1253
# This needs to be logged, along with the extra revisions
1254
for idx in range(len(current_merge_stack)):
1255
node = current_merge_stack[idx]
1256
if node is not None:
1257
if include_merges or node[2] == 0:
1259
current_merge_stack[idx] = None
1263
def reverse_by_depth(merge_sorted_revisions, _depth=0):
1264
"""Reverse revisions by depth.
1266
Revisions with a different depth are sorted as a group with the previous
1267
revision of that depth. There may be no topological justification for this,
1268
but it looks much nicer.
1270
# Add a fake revision at start so that we can always attach sub revisions
1271
merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
1273
for val in merge_sorted_revisions:
1274
if val[2] == _depth:
1275
# Each revision at the current depth becomes a chunk grouping all
1276
# higher depth revisions.
1277
zd_revisions.append([val])
1279
zd_revisions[-1].append(val)
1280
for revisions in zd_revisions:
1281
if len(revisions) > 1:
1282
# We have higher depth revisions, let reverse them locally
1283
revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
1284
zd_revisions.reverse()
1286
for chunk in zd_revisions:
1287
result.extend(chunk)
1289
# Top level call, get rid of the fake revisions that have been added
1290
result = [r for r in result if r[0] is not None and r[1] is not None]
1294
class LogRevision(object):
1295
"""A revision to be logged (by LogFormatter.log_revision).
1297
A simple wrapper for the attributes of a revision to be logged.
1298
The attributes may or may not be populated, as determined by the
1299
logging options and the log formatter capabilities.
1302
def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
1303
tags=None, diff=None, signature=None):
1308
self.revno = str(revno)
1309
self.merge_depth = merge_depth
1313
self.signature = signature
1316
class LogFormatter(object):
1317
"""Abstract class to display log messages.
1319
At a minimum, a derived class must implement the log_revision method.
1321
If the LogFormatter needs to be informed of the beginning or end of
1322
a log it should implement the begin_log and/or end_log hook methods.
1324
A LogFormatter should define the following supports_XXX flags
1325
to indicate which LogRevision attributes it supports:
1327
- supports_delta must be True if this log formatter supports delta.
1328
Otherwise the delta attribute may not be populated. The 'delta_format'
1329
attribute describes whether the 'short_status' format (1) or the long
1330
one (2) should be used.
1332
- supports_merge_revisions must be True if this log formatter supports
1333
merge revisions. If not, then only mainline revisions will be passed
1336
- preferred_levels is the number of levels this formatter defaults to.
1337
The default value is zero meaning display all levels.
1338
This value is only relevant if supports_merge_revisions is True.
1340
- supports_tags must be True if this log formatter supports tags.
1341
Otherwise the tags attribute may not be populated.
1343
- supports_diff must be True if this log formatter supports diffs.
1344
Otherwise the diff attribute may not be populated.
1346
- supports_signatures must be True if this log formatter supports GPG
1349
Plugins can register functions to show custom revision properties using
1350
the properties_handler_registry. The registered function
1351
must respect the following interface description::
1353
def my_show_properties(properties_dict):
1354
# code that returns a dict {'name':'value'} of the properties
1357
preferred_levels = 0
1359
def __init__(self, to_file, show_ids=False, show_timezone='original',
1360
delta_format=None, levels=None, show_advice=False,
1361
to_exact_file=None, author_list_handler=None):
1362
"""Create a LogFormatter.
1364
:param to_file: the file to output to
1365
:param to_exact_file: if set, gives an output stream to which
1366
non-Unicode diffs are written.
1367
:param show_ids: if True, revision-ids are to be displayed
1368
:param show_timezone: the timezone to use
1369
:param delta_format: the level of delta information to display
1370
or None to leave it to the formatter to decide
1371
:param levels: the number of levels to display; None or -1 to
1372
let the log formatter decide.
1373
:param show_advice: whether to show advice at the end of the
1375
:param author_list_handler: callable generating a list of
1376
authors to display for a given revision
1378
self.to_file = to_file
1379
# 'exact' stream used to show diff, it should print content 'as is'
1380
# and should not try to decode/encode it to unicode to avoid bug #328007
1381
if to_exact_file is not None:
1382
self.to_exact_file = to_exact_file
1384
# XXX: somewhat hacky; this assumes it's a codec writer; it's better
1385
# for code that expects to get diffs to pass in the exact file
1387
self.to_exact_file = getattr(to_file, 'stream', to_file)
1388
self.show_ids = show_ids
1389
self.show_timezone = show_timezone
1390
if delta_format is None:
1391
# Ensures backward compatibility
1392
delta_format = 2 # long format
1393
self.delta_format = delta_format
1394
self.levels = levels
1395
self._show_advice = show_advice
1396
self._merge_count = 0
1397
self._author_list_handler = author_list_handler
1399
def get_levels(self):
1400
"""Get the number of levels to display or 0 for all."""
1401
if getattr(self, 'supports_merge_revisions', False):
1402
if self.levels is None or self.levels == -1:
1403
self.levels = self.preferred_levels
1408
def log_revision(self, revision):
1411
:param revision: The LogRevision to be logged.
1413
raise NotImplementedError('not implemented in abstract base')
1415
def show_advice(self):
1416
"""Output user advice, if any, when the log is completed."""
1417
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1418
advice_sep = self.get_advice_separator()
1420
self.to_file.write(advice_sep)
1422
"Use --include-merged or -n0 to see merged revisions.\n")
1424
def get_advice_separator(self):
1425
"""Get the text separating the log from the closing advice."""
1428
def short_committer(self, rev):
1429
name, address = config.parse_username(rev.committer)
1434
def short_author(self, rev):
1435
return self.authors(rev, 'first', short=True, sep=', ')
1437
def authors(self, rev, who, short=False, sep=None):
1438
"""Generate list of authors, taking --authors option into account.
1440
The caller has to specify the name of a author list handler,
1441
as provided by the author list registry, using the ``who``
1442
argument. That name only sets a default, though: when the
1443
user selected a different author list generation using the
1444
``--authors`` command line switch, as represented by the
1445
``author_list_handler`` constructor argument, that value takes
1448
:param rev: The revision for which to generate the list of authors.
1449
:param who: Name of the default handler.
1450
:param short: Whether to shorten names to either name or address.
1451
:param sep: What separator to use for automatic concatenation.
1453
if self._author_list_handler is not None:
1454
# The user did specify --authors, which overrides the default
1455
author_list_handler = self._author_list_handler
1457
# The user didn't specify --authors, so we use the caller's default
1458
author_list_handler = author_list_registry.get(who)
1459
names = author_list_handler(rev)
1461
for i in range(len(names)):
1462
name, address = config.parse_username(names[i])
1468
names = sep.join(names)
1471
def merge_marker(self, revision):
1472
"""Get the merge marker to include in the output or '' if none."""
1473
if len(revision.rev.parent_ids) > 1:
1474
self._merge_count += 1
1479
def show_properties(self, revision, indent):
1480
"""Displays the custom properties returned by each registered handler.
1482
If a registered handler raises an error it is propagated.
1484
for line in self.custom_properties(revision):
1485
self.to_file.write("%s%s\n" % (indent, line))
1487
def custom_properties(self, revision):
1488
"""Format the custom properties returned by each registered handler.
1490
If a registered handler raises an error it is propagated.
1492
:return: a list of formatted lines (excluding trailing newlines)
1494
lines = self._foreign_info_properties(revision)
1495
for key, handler in properties_handler_registry.iteritems():
1496
lines.extend(self._format_properties(handler(revision)))
1499
def _foreign_info_properties(self, rev):
1500
"""Custom log displayer for foreign revision identifiers.
1502
:param rev: Revision object.
1504
# Revision comes directly from a foreign repository
1505
if isinstance(rev, foreign.ForeignRevision):
1506
return self._format_properties(
1507
rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))
1509
# Imported foreign revision revision ids always contain :
1510
if not ":" in rev.revision_id:
1513
# Revision was once imported from a foreign repository
1515
foreign_revid, mapping = \
1516
foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
1517
except errors.InvalidRevisionId:
1520
return self._format_properties(
1521
mapping.vcs.show_foreign_revid(foreign_revid))
1523
def _format_properties(self, properties):
1525
for key, value in properties.items():
1526
lines.append(key + ': ' + value)
1529
def show_diff(self, to_file, diff, indent):
1530
for l in diff.rstrip().split('\n'):
1531
to_file.write(indent + '%s\n' % (l,))
1534
# Separator between revisions in long format
1535
_LONG_SEP = '-' * 60
1538
class LongLogFormatter(LogFormatter):
1540
supports_merge_revisions = True
1541
preferred_levels = 1
1542
supports_delta = True
1543
supports_tags = True
1544
supports_diff = True
1545
supports_signatures = True
1547
def __init__(self, *args, **kwargs):
1548
super(LongLogFormatter, self).__init__(*args, **kwargs)
1549
if self.show_timezone == 'original':
1550
self.date_string = self._date_string_original_timezone
1552
self.date_string = self._date_string_with_timezone
1554
def _date_string_with_timezone(self, rev):
1555
return format_date(rev.timestamp, rev.timezone or 0,
1558
def _date_string_original_timezone(self, rev):
1559
return format_date_with_offset_in_original_timezone(rev.timestamp,
1562
def log_revision(self, revision):
1563
"""Log a revision, either merged or not."""
1564
indent = ' ' * revision.merge_depth
1566
if revision.revno is not None:
1567
lines.append('revno: %s%s' % (revision.revno,
1568
self.merge_marker(revision)))
1570
lines.append('tags: %s' % (', '.join(revision.tags)))
1571
if self.show_ids or revision.revno is None:
1572
lines.append('revision-id: %s' % (revision.rev.revision_id,))
1574
for parent_id in revision.rev.parent_ids:
1575
lines.append('parent: %s' % (parent_id,))
1576
lines.extend(self.custom_properties(revision.rev))
1578
committer = revision.rev.committer
1579
authors = self.authors(revision.rev, 'all')
1580
if authors != [committer]:
1581
lines.append('author: %s' % (", ".join(authors),))
1582
lines.append('committer: %s' % (committer,))
1584
branch_nick = revision.rev.properties.get('branch-nick', None)
1585
if branch_nick is not None:
1586
lines.append('branch nick: %s' % (branch_nick,))
1588
lines.append('timestamp: %s' % (self.date_string(revision.rev),))
1590
if revision.signature is not None:
1591
lines.append('signature: ' + revision.signature)
1593
lines.append('message:')
1594
if not revision.rev.message:
1595
lines.append(' (no message)')
1597
message = revision.rev.message.rstrip('\r\n')
1598
for l in message.split('\n'):
1599
lines.append(' %s' % (l,))
1601
# Dump the output, appending the delta and diff if requested
1602
to_file = self.to_file
1603
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1604
if revision.delta is not None:
1605
# Use the standard status output to display changes
1606
from breezy.delta import report_delta
1607
report_delta(to_file, revision.delta, short_status=False,
1608
show_ids=self.show_ids, indent=indent)
1609
if revision.diff is not None:
1610
to_file.write(indent + 'diff:\n')
1612
# Note: we explicitly don't indent the diff (relative to the
1613
# revision information) so that the output can be fed to patch -p0
1614
self.show_diff(self.to_exact_file, revision.diff, indent)
1615
self.to_exact_file.flush()
1617
def get_advice_separator(self):
1618
"""Get the text separating the log from the closing advice."""
1619
return '-' * 60 + '\n'
1622
class ShortLogFormatter(LogFormatter):
1624
supports_merge_revisions = True
1625
preferred_levels = 1
1626
supports_delta = True
1627
supports_tags = True
1628
supports_diff = True
1630
def __init__(self, *args, **kwargs):
1631
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1632
self.revno_width_by_depth = {}
1634
def log_revision(self, revision):
1635
# We need two indents: one per depth and one for the information
1636
# relative to that indent. Most mainline revnos are 5 chars or
1637
# less while dotted revnos are typically 11 chars or less. Once
1638
# calculated, we need to remember the offset for a given depth
1639
# as we might be starting from a dotted revno in the first column
1640
# and we want subsequent mainline revisions to line up.
1641
depth = revision.merge_depth
1642
indent = ' ' * depth
1643
revno_width = self.revno_width_by_depth.get(depth)
1644
if revno_width is None:
1645
if revision.revno is None or revision.revno.find('.') == -1:
1646
# mainline revno, e.g. 12345
1649
# dotted revno, e.g. 12345.10.55
1651
self.revno_width_by_depth[depth] = revno_width
1652
offset = ' ' * (revno_width + 1)
1654
to_file = self.to_file
1657
tags = ' {%s}' % (', '.join(revision.tags))
1658
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1659
revision.revno or "", self.short_author(revision.rev),
1660
format_date(revision.rev.timestamp,
1661
revision.rev.timezone or 0,
1662
self.show_timezone, date_fmt="%Y-%m-%d",
1664
tags, self.merge_marker(revision)))
1665
self.show_properties(revision.rev, indent+offset)
1666
if self.show_ids or revision.revno is None:
1667
to_file.write(indent + offset + 'revision-id:%s\n'
1668
% (revision.rev.revision_id,))
1669
if not revision.rev.message:
1670
to_file.write(indent + offset + '(no message)\n')
1672
message = revision.rev.message.rstrip('\r\n')
1673
for l in message.split('\n'):
1674
to_file.write(indent + offset + '%s\n' % (l,))
1676
if revision.delta is not None:
1677
# Use the standard status output to display changes
1678
from breezy.delta import report_delta
1679
report_delta(to_file, revision.delta,
1680
short_status=self.delta_format==1,
1681
show_ids=self.show_ids, indent=indent + offset)
1682
if revision.diff is not None:
1683
self.show_diff(self.to_exact_file, revision.diff, ' ')
1687
class LineLogFormatter(LogFormatter):
1689
supports_merge_revisions = True
1690
preferred_levels = 1
1691
supports_tags = True
1693
def __init__(self, *args, **kwargs):
1694
super(LineLogFormatter, self).__init__(*args, **kwargs)
1695
width = terminal_width()
1696
if width is not None:
1697
# we need one extra space for terminals that wrap on last char
1699
self._max_chars = width
1701
def truncate(self, str, max_len):
1702
if max_len is None or len(str) <= max_len:
1704
return str[:max_len-3] + '...'
1706
def date_string(self, rev):
1707
return format_date(rev.timestamp, rev.timezone or 0,
1708
self.show_timezone, date_fmt="%Y-%m-%d",
1711
def message(self, rev):
1713
return '(no message)'
1717
def log_revision(self, revision):
1718
indent = ' ' * revision.merge_depth
1719
self.to_file.write(self.log_string(revision.revno, revision.rev,
1720
self._max_chars, revision.tags, indent))
1721
self.to_file.write('\n')
1723
def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
1724
"""Format log info into one string. Truncate tail of string
1726
:param revno: revision number or None.
1727
Revision numbers counts from 1.
1728
:param rev: revision object
1729
:param max_chars: maximum length of resulting string
1730
:param tags: list of tags or None
1731
:param prefix: string to prefix each line
1732
:return: formatted truncated string
1736
# show revno only when is not None
1737
out.append("%s:" % revno)
1738
if max_chars is not None:
1739
out.append(self.truncate(self.short_author(rev), (max_chars+3)/4))
1741
out.append(self.short_author(rev))
1742
out.append(self.date_string(rev))
1743
if len(rev.parent_ids) > 1:
1744
out.append('[merge]')
1746
tag_str = '{%s}' % (', '.join(tags))
1748
out.append(rev.get_summary())
1749
return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
1752
class GnuChangelogLogFormatter(LogFormatter):
1754
supports_merge_revisions = True
1755
supports_delta = True
1757
def log_revision(self, revision):
1758
"""Log a revision, either merged or not."""
1759
to_file = self.to_file
1761
date_str = format_date(revision.rev.timestamp,
1762
revision.rev.timezone or 0,
1764
date_fmt='%Y-%m-%d',
1766
committer_str = self.authors(revision.rev, 'first', sep=', ')
1767
committer_str = committer_str.replace(' <', ' <')
1768
to_file.write('%s %s\n\n' % (date_str, committer_str))
1770
if revision.delta is not None and revision.delta.has_changed():
1771
for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
1773
to_file.write('\t* %s:\n' % (path,))
1774
for c in revision.delta.renamed:
1775
oldpath, newpath = c[:2]
1776
# For renamed files, show both the old and the new path
1777
to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
1780
if not revision.rev.message:
1781
to_file.write('\tNo commit message\n')
1783
message = revision.rev.message.rstrip('\r\n')
1784
for l in message.split('\n'):
1785
to_file.write('\t%s\n' % (l.lstrip(),))
1789
def line_log(rev, max_chars):
    """Format rev as a single log line of at most max_chars characters."""
    formatter = LineLogFormatter(None)
    return formatter.log_string(None, rev, max_chars)
1794
class LogFormatterRegistry(registry.Registry):
1795
"""Registry for log formatters"""
1797
def make_formatter(self, name, *args, **kwargs):
1798
"""Construct a formatter from arguments.
1800
:param name: Name of the formatter to construct. 'short', 'long' and
1801
'line' are built-in.
1803
return self.get(name)(*args, **kwargs)
1805
def get_default(self, branch):
1806
c = branch.get_config_stack()
1807
return self.get(c.get('log_format'))
1810
log_formatter_registry = LogFormatterRegistry()
1813
log_formatter_registry.register('short', ShortLogFormatter,
1814
'Moderately short log format.')
1815
log_formatter_registry.register('long', LongLogFormatter,
1816
'Detailed log format.')
1817
log_formatter_registry.register('line', LineLogFormatter,
1818
'Log format with one line per revision.')
1819
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
1820
'Format used by GNU ChangeLog files.')
1823
def register_formatter(name, formatter):
    """Register formatter under name in the global log formatter registry."""
    log_formatter_registry.register(name, formatter)
1827
def log_formatter(name, *args, **kwargs):
1828
"""Construct a formatter from arguments.
1830
name -- Name of the formatter to construct; currently 'long', 'short' and
1831
'line' are supported.
1834
return log_formatter_registry.make_formatter(name, *args, **kwargs)
1836
raise errors.BzrCommandError(gettext("unknown log formatter: %r") % name)
1839
def author_list_all(rev):
    """Return every apparent author of rev, as a fresh list."""
    # Copy so callers can mutate the result without touching rev's data.
    return list(rev.get_apparent_authors())
1843
def author_list_first(rev):
1844
lst = rev.get_apparent_authors()
1851
def author_list_committer(rev):
    """Return a single-element list holding rev's committer."""
    committer = rev.committer
    return [committer]
1855
author_list_registry = registry.Registry()
1857
author_list_registry.register('all', author_list_all,
1860
author_list_registry.register('first', author_list_first,
1863
author_list_registry.register('committer', author_list_committer,
1867
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
1869
"""Show the change in revision history comparing the old revision history to the new one.
1871
:param branch: The branch where the revisions exist
1872
:param old_rh: The old revision history
1873
:param new_rh: The new revision history
1874
:param to_file: A file to write the results to. If None, stdout will be used
1877
to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
1879
lf = log_formatter(log_format,
1882
show_timezone='original')
1884
# This is the first index which is different between
1887
for i in range(max(len(new_rh), len(old_rh))):
1888
if (len(new_rh) <= i
1890
or new_rh[i] != old_rh[i]):
1894
if base_idx is None:
1895
to_file.write('Nothing seems to have changed\n')
1897
## TODO: It might be nice to do something like show_log
1898
## and show the merged entries. But since this is the
1899
## removed revisions, it shouldn't be as important
1900
if base_idx < len(old_rh):
1901
to_file.write('*'*60)
1902
to_file.write('\nRemoved Revisions:\n')
1903
for i in range(base_idx, len(old_rh)):
1904
rev = branch.repository.get_revision(old_rh[i])
1905
lr = LogRevision(rev, i+1, 0, None)
1907
to_file.write('*'*60)
1908
to_file.write('\n\n')
1909
if base_idx < len(new_rh):
1910
to_file.write('Added Revisions:\n')
1915
direction='forward',
1916
start_revision=base_idx+1,
1917
end_revision=len(new_rh),
1921
def get_history_change(old_revision_id, new_revision_id, repository):
1922
"""Calculate the uncommon lefthand history between two revisions.
1924
:param old_revision_id: The original revision id.
1925
:param new_revision_id: The new revision id.
1926
:param repository: The repository to use for the calculation.
1928
return old_history, new_history
1931
old_revisions = set()
1933
new_revisions = set()
1934
graph = repository.get_graph()
1935
new_iter = graph.iter_lefthand_ancestry(new_revision_id)
1936
old_iter = graph.iter_lefthand_ancestry(old_revision_id)
1937
stop_revision = None
1940
while do_new or do_old:
1943
new_revision = next(new_iter)
1944
except StopIteration:
1947
new_history.append(new_revision)
1948
new_revisions.add(new_revision)
1949
if new_revision in old_revisions:
1950
stop_revision = new_revision
1954
old_revision = next(old_iter)
1955
except StopIteration:
1958
old_history.append(old_revision)
1959
old_revisions.add(old_revision)
1960
if old_revision in new_revisions:
1961
stop_revision = old_revision
1963
new_history.reverse()
1964
old_history.reverse()
1965
if stop_revision is not None:
1966
new_history = new_history[new_history.index(stop_revision) + 1:]
1967
old_history = old_history[old_history.index(stop_revision) + 1:]
1968
return old_history, new_history
1971
def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        # Revisions that were on the old tip's mainline but are no longer
        # reachable from the new tip.
        output.write('*'*60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*'*60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        # The added revisions are the newest len(new_history) mainline
        # revisions, so start the log at the first of them.
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno,)
2002
def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history,
        oldest first.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    # History is oldest-first: the first entry's revno is start_revno and
    # the final entry's is last_revno. (Previously the revno was computed
    # as i + last_revno, overshooting last_revno for every entry but the
    # first, and start_revno went unused.)
    start_revno = last_revno - len(history) + 1
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, start_revno + i, 0, None)
        lf.log_revision(lr)
2017
def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    # Imported here to avoid a circular import with breezy.builtins.
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        # No revision range given: look the paths up in the working tree,
        # or the basis tree when only a branch was opened.
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all: nothing else to look in.
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info
2112
def _get_kind_for_file_id(tree, path, file_id):
2113
"""Return the kind of a file-id or None if it doesn't exist."""
2114
if file_id is not None:
2115
return tree.kind(path, file_id)
2120
# Registry of custom revision-property handlers; each registered callable
# takes a revision and returns a dict of extra display properties
# (see _bugs_properties_handler below).
properties_handler_registry = registry.Registry()
2122
# Use the properties handlers to print out bug information if available
2123
def _bugs_properties_handler(revision):
2124
if 'bugs' in revision.properties:
2125
bug_lines = revision.properties['bugs'].split('\n')
2126
bug_rows = [line.split(' ', 1) for line in bug_lines]
2127
fixed_bug_urls = [row[0] for row in bug_rows if
2128
len(row) > 1 and row[1] == 'fixed']
2131
return {ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls)):\
2132
' '.join(fixed_bug_urls)}
2135
# Register the bug-fix handler so it is consulted by default when
# formatting revision properties.
properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
2139
# Adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is, this won't be considered a stable api.
2148
# read revision objects
2149
_make_revision_objects,
2150
# filter on log messages
2151
_make_search_filter,
2152
# generate deltas for things we will show