# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Code to show logs of changes.

Various flavors of log can be produced:

* for one file, or the whole tree, and (not done yet) for
  files in a given directory

* in "verbose" mode with a description of what changed from one
  version to the next

* with file-ids and revision-ids shown

Logs are actually written out through an abstract LogFormatter
interface, which allows for different preferred formats. Plugins can
register formats too.

Logs can be produced in either forward (oldest->newest) or reverse
(newest->oldest) order.

Logs can be filtered to show only revisions matching a particular
search string, or within a particular range of revisions. The range
can be given as date/times, which are reduced to revisions before
calling in.

In verbose mode we show a summary of what changed in each particular
revision. Note that this is the delta for changes in that revision
relative to its left-most parent, not the delta relative to the last
logged revision. So for example if you ask for a verbose log of
changes touching hello.c you will get a list of those revisions also
listing other things that were changed in the same revision, but not
all the changes since the previous revision that touched hello.c.
"""

from __future__ import absolute_import

from io import BytesIO

import itertools
import re
import sys

from warnings import (
    warn,
    )

from .lazy_import import lazy_import
lazy_import(globals(), """
from breezy import (
    config,
    diff,
    foreign,
    lazy_regex,
    revision as _mod_revision,
    )
from breezy.i18n import gettext, ngettext
""")

from . import (
    errors,
    registry,
    revisionspec,
    trace,
    )
from .osutils import (
    format_date,
    format_date_with_offset_in_original_timezone,
    get_diff_header_encoding,
    get_terminal_encoding,
    terminal_width,
    )
from .tree import find_previous_path


def find_touching_revisions(repository, last_revision, last_tree, last_path):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    TODO: Perhaps some way to limit this to only particular revisions,
        or to traverse a non-mainline set of revisions?
    """
    last_verifier = last_tree.get_file_verifier(last_path)
    graph = repository.get_graph()
    history = list(graph.iter_lefthand_ancestry(last_revision, []))
    revno = len(history)
    for revision_id in history:
        this_tree = repository.revision_tree(revision_id)
        this_path = find_previous_path(last_tree, this_tree, last_path)

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?
        if this_path is not None and last_path is None:
            yield revno, revision_id, "deleted " + this_path
            this_verifier = this_tree.get_file_verifier(this_path)
        elif this_path is None and last_path is not None:
            yield revno, revision_id, "added " + last_path
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
            this_verifier = this_tree.get_file_verifier(this_path)
        else:
            this_verifier = this_tree.get_file_verifier(this_path)
            if (this_verifier != last_verifier):
                yield revno, revision_id, "modified " + this_path

        last_verifier = this_verifier
        last_path = this_path
        last_tree = this_tree
        if last_path is None:
            return
        revno -= 1
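

# Illustrative usage sketch (not part of the original module): driving
# find_touching_revisions() from an open branch.  The helper name and the
# 'hello.c' path are hypothetical.
def _example_find_touching_revisions(branch, path='hello.c'):
    """Print the revisions that added, modified, renamed or deleted ``path``."""
    with branch.lock_read():
        last_revision = branch.last_revision()
        last_tree = branch.repository.revision_tree(last_revision)
        for revno, revision_id, description in find_touching_revisions(
                branch.repository, last_revision, last_tree, path):
            print(revno, description)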


def show_log(branch,
             lf,
             verbose=False,
             direction='reverse',
             start_revision=None,
             end_revision=None,
             search=None,
             limit=None,
             show_diff=False,
             match=None):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.

    :param verbose: If True show added/changed/deleted/renamed files.

    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        the opposite.

    :param start_revision: If not None, only show revisions >= start_revision

    :param end_revision: If not None, only show revisions <= end_revision

    :param search: If not None, only show revisions with matching commit
        messages.

    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.

    :param show_diff: If True, output a diff after each revision.

    :param match: Dictionary of search lists to use when matching revision
        properties.
    """
    if verbose:
        delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        diff_type = 'full'
    else:
        diff_type = None

    if isinstance(start_revision, int):
        try:
            start_revision = revisionspec.RevisionInfo(branch, start_revision)
        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
            raise errors.InvalidRevisionNumber(start_revision)

    if isinstance(end_revision, int):
        try:
            end_revision = revisionspec.RevisionInfo(branch, end_revision)
        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
            raise errors.InvalidRevisionNumber(end_revision)

    if end_revision is not None and end_revision.revno == 0:
        raise errors.InvalidRevisionNumber(end_revision.revno)

    # Build the request and execute it
    rqst = make_log_request_dict(
        direction=direction,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
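

# Sketch only: the backwards-compatible entry point above in action.  'long'
# is one of the formatter names registered at the bottom of this module;
# ``to_file`` is assumed to be a text stream such as sys.stdout, and the
# helper name is hypothetical.
def _example_show_log(branch, to_file):
    lf = log_formatter('long', to_file=to_file)
    show_log(branch, lf, verbose=True, direction='reverse', limit=10)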


# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }


def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False, omit_merges=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
        'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
        affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
        revisions >= start_revision

    :param end_revision: If not None, only generate
        revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions, all revisions
        are shown if None or 0.

    :param message_search: If not None, only include revisions with
        matching commit messages

    :param levels: the number of levels of revisions to
        generate; 1 for just the mainline; 0 for all levels, or None for
        a sensible default.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
        'full' means generate the complete delta - adds/deletes/modifies/etc;
        'partial' means filter the delta using specific_fileids;
        None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
        'full' means generate the complete diff - adds/deletes/modifies/etc;
        'partial' means filter the diff using specific_fileids;
        None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
        algorithm used for matching specific_fileids. This parameter
        may be removed in the future so breezy client code should NOT
        use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
        range operator or as a graph difference.

    :param signature: show digital signature information

    :param match: Dictionary of list of search strings to use when filtering
        revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
        the empty string to match any of the preceding properties.

    :param omit_merges: If True, commits with more than one parent are
        omitted.
    """
    # Take care of old style message_search parameter
    if message_search:
        if match:
            if 'message' in match:
                match['message'].append(message_search)
            else:
                match['message'] = [message_search]
        else:
            match = {'message': [message_search]}
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'match': match,
        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
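

# Sketch only: building a request dictionary for the Logger class defined
# below.  The author and message search strings are purely illustrative, and
# the helper name is hypothetical.
def _example_request():
    return make_log_request_dict(
        direction='reverse',
        levels=2,
        limit=20,
        match={'author': ['jane'], 'message': ['fix']})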


def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result


def format_signature_validity(rev_id, branch):
    """get the signature validity

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log
    """
    from breezy import gpg

    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"


class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)


class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Create a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
            See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)

    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)

        with self.branch.lock_read():
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
            # user didn't specify levels, use whatever the LF can handle:
            rqst['levels'] = lf.get_levels()

        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
        if not getattr(lf, 'supports_signatures', False):
            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        try:
            for lr in generator.iter_log_revisions():
                lf.log_revision(lr)
        except errors.GhostRevisionUnusableHere:
            raise errors.BzrCommandError(
                gettext('Further revision history missing.'))
        lf.show_advice()

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
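

# Sketch only: the recommended pairing of make_log_request_dict() with
# Logger, as suggested in the show_log() docstring.  ``to_file`` is assumed
# to be a text stream such as sys.stdout; the helper name is hypothetical.
def _example_logger(branch, to_file):
    rqst = make_log_request_dict(limit=10, levels=1)
    lf = log_formatter('short', to_file=to_file)
    Logger(branch, rqst).show(lf)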


class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        show_signature = rqst.get('signature')
        omit_merges = rqst.get('omit_merges')
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if (levels != 0 and merge_depth is not None and
                        merge_depth >= levels):
                    continue
                if omit_merges and len(rev.parent_ids) > 1:
                    continue
                if rev is None:
                    raise errors.GhostRevisionUnusableHere(rev_id)
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                if show_signature:
                    signature = format_signature_validity(rev_id, self.branch)
                else:
                    signature = None
                yield LogRevision(
                    rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff, signature)
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return

    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = BytesIO()
        path_encoding = get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
                             new_label='', path_encoding=path_encoding)
        return s.getvalue()

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise errors.BzrError(
                    "illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
            rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
                                     rqst.get('delta_type'), rqst.get('match'),
                                     file_ids=rqst.get('specific_fileids'),
                                     direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(
            self.branch, rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
                                     rqst.get('delta_type'), rqst.get('match'))


def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
                         ):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
        a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(gettext(
            '--exclude-common-ancestry requires two different revisions'))
    if direction not in ('reverse', 'forward'):
        raise ValueError(gettext('invalid direction %r') % direction)
    br_rev_id = branch.last_revision()
    if br_rev_id == _mod_revision.NULL_REVISION:
        # Empty branch, nothing to show
        return []

    if (end_rev_id and start_rev_id == end_rev_id
            and (not generate_merge_revisions
                 or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        return _generate_one_revision(branch, end_rev_id, br_rev_id,
                                      branch.revno())
    if not generate_merge_revisions:
        try:
            # If we only want to see linear revisions, we can iterate ...
            iter_revs = _linear_view_revisions(
                branch, start_rev_id, end_rev_id,
                exclude_common_ancestry=exclude_common_ancestry)
            # If a start limit was given and it's not obviously an
            # ancestor of the end limit, check it before outputting anything
            if (direction == 'forward'
                    or (start_rev_id and not _is_obvious_ancestor(
                        branch, start_rev_id, end_rev_id))):
                iter_revs = list(iter_revs)
            if direction == 'forward':
                iter_revs = reversed(iter_revs)
            return iter_revs
        except _StartNotLinearAncestor:
            # Switch to the slower implementation that may be able to find a
            # non-obvious ancestor out of the left-hand history.
            pass
    iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                        direction, delayed_graph_generation,
                                        exclude_common_ancestry)
    if direction == 'forward':
        iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
    return iter_revs


def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        # It's the tip
        return [(br_rev_id, br_revno, 0)]
    else:
        revno_str = _compute_revno_str(branch, rev_id)
        return [(rev_id, revno_str, 0)]


def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    # The above is only true for old formats (<= 0.92), for newer formats, a
    # couple of seconds only should be needed to load the whole graph and the
    # other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept to show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                            and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    # adjust end_rev_id.
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError(gettext('Start revision not found in'
                                                 ' history of end revision.'))

    # We exit the loop above because we encounter a revision with merges, from
    # this revision, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = itertools.chain(
        iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
                              rebase_initial_depths=(direction == 'reverse'),
                              exclude_common_ancestry=exclude_common_ancestry))
    return view_revisions


def _has_merges(branch, rev_id):
    """Does a revision have multiple parents or not?"""
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _compute_revno_str(branch, rev_id):
    """Compute the revno string from a rev_id.

    :return: The revno string, or None if the revision is not in the supplied
        branch.
    """
    try:
        revno = branch.revision_id_to_dotted_revno(rev_id)
    except errors.NoSuchRevision:
        # The revision must be outside of this branch
        return None
    else:
        return '.'.join(str(n) for n in revno)


def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        try:
            start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
            end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        except errors.NoSuchRevision:
            # one or both is not in the branch; not obvious
            return False
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
        elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
              start_dotted[0:1] == end_dotted[0:1]):
            # both on same development line
            return start_dotted[2] <= end_dotted[2]
        else:
            # not obvious
            return False
    # if either start or end is not specified then we use either the first or
    # the last revision and *they* are obvious ancestors.
    return True


def _linear_view_revisions(branch, start_rev_id, end_rev_id,
                           exclude_common_ancestry=False):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param exclude_common_ancestry: Whether the start_rev_id should be part of
        the iterated revisions.
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
        dotted_revno will be None for ghosts
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
        is not found walking the left-hand history
    """
    repo = branch.repository
    graph = repo.get_graph()
    if start_rev_id is None and end_rev_id is None:
        if branch._format.stores_revno() or \
                config.GlobalStack().get('calculate_revnos'):
            try:
                br_revno, br_rev_id = branch.last_revision_info()
            except errors.GhostRevisionsHaveNoRevno:
                br_rev_id = branch.last_revision()
                cur_revno = None
            else:
                cur_revno = br_revno
        else:
            br_rev_id = branch.last_revision()
            cur_revno = None

        graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
                                                  (_mod_revision.NULL_REVISION,))
        while True:
            try:
                revision_id = next(graph_iter)
            except errors.RevisionNotPresent as e:
                # Oops, a ghost.
                yield e.revision_id, None, None
                break
            except StopIteration:
                break
            else:
                yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
                if cur_revno is not None:
                    cur_revno -= 1
    else:
        br_rev_id = branch.last_revision()
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
                                                  (_mod_revision.NULL_REVISION,))
        while True:
            try:
                revision_id = next(graph_iter)
            except StopIteration:
                break
            except errors.RevisionNotPresent as e:
                # Oops, a ghost.
                yield e.revision_id, None, None
                break
            else:
                revno_str = _compute_revno_str(branch, revision_id)
                if not found_start and revision_id == start_rev_id:
                    if not exclude_common_ancestry:
                        yield revision_id, revno_str, 0
                    found_start = True
                    break
                else:
                    yield revision_id, revno_str, 0
        if not found_start:
            raise _StartNotLinearAncestor()


def _graph_view_revisions(branch, start_rev_id, end_rev_id,
                          rebase_initial_depths=True,
                          exclude_common_ancestry=False):
    """Calculate revisions to view including merges, newest to oldest.

    :param branch: the branch
    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param rebase_initial_depth: should depths be rebased until a mainline
        revision is found?
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    """
    if exclude_common_ancestry:
        stop_rule = 'with-merges-without-common-ancestry'
    else:
        stop_rule = 'with-merges'
    view_revisions = branch.iter_merge_sorted_revisions(
        start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
        stop_rule=stop_rule)
    if not rebase_initial_depths:
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            yield rev_id, '.'.join(map(str, revno)), merge_depth
    else:
        # We're following a development line starting at a merged revision.
        # We need to adjust depths down by the initial depth until we find
        # a depth less than it. Then we use that depth as the adjustment.
        # If and when we reach the mainline, depth adjustment ends.
        depth_adjustment = None
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            if depth_adjustment is None:
                depth_adjustment = merge_depth
            if depth_adjustment:
                if merge_depth < depth_adjustment:
                    # From now on we reduce the depth adjustment, this can be
                    # surprising for users. The alternative requires two passes
                    # which breaks the fast display of the first revision
                    # though.
                    depth_adjustment = merge_depth
                merge_depth -= depth_adjustment
            yield rev_id, '.'.join(map(str, revno)), merge_depth


def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision have a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r, n, d in view_revisions])
        if min_depth != 0:
            view_revisions = [(r, n, d - min_depth)
                              for r, n, d in view_revisions]
    return view_revisions


def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                          file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
        Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non empty, only revisions matching one or more of
        the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if isinstance(view_revisions, list):
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            log_rev_iterator = adapter(
                branch, generate_delta, search, log_rev_iterator, file_ids,
                direction)
        else:
            log_rev_iterator = adapter(
                branch, generate_delta, search, log_rev_iterator)
    return log_rev_iterator
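

# Sketch only: consuming the batched iterator built above.  Revisions arrive
# in lists; each element pairs the (rev_id, revno, merge_depth) view tuple
# with the loaded Revision object and an optional delta.  The helper name is
# hypothetical.
def _example_iterate_log_revs(branch, view_revisions):
    for batch in make_log_rev_iterator(branch, view_revisions, None, None):
        for (rev_id, revno, merge_depth), rev, delta in batch:
            print(revno, rev.get_summary())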


def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param match: A dictionary with properties as keys and lists of strings
        as values. To match, a revision may match any of the supplied strings
        within a single property but must match at least one string for each
        property.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not match:
        return log_rev_iterator
    # Use lazy_compile so mapping to InvalidPattern error occurs.
    searchRE = [(k, [lazy_regex.lazy_compile(x, re.IGNORECASE) for x in v])
                for k, v in match.items()]
    return _filter_re(searchRE, log_rev_iterator)


def _filter_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs


def _match_filter(searchRE, rev):
    strings = {
        'message': (rev.message,),
        'committer': (rev.committer,),
        'author': (rev.get_apparent_authors()),
        'bugs': list(rev.iter_bugs())
        }
    strings[''] = [item for inner_list in strings.values()
                   for item in inner_list]
    for k, v in searchRE:
        if k in strings and not _match_any_filter(strings[k], v):
            return False
    return True


def _match_any_filter(strings, res):
    return any(r.search(s) for r in res for s in strings)
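

# Sketch only: _match_any_filter() answers "does any pattern hit any of the
# candidate strings?".  The patterns and strings below are invented.
def _example_match_any():
    patterns = [re.compile('fix', re.IGNORECASE), re.compile('bug')]
    return _match_any_filter(['Fixed the parser', 'tidied whitespace'],
                             patterns)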


def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    """Add revision deltas to a log iterator if needed.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
        Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :param fileids: If non empty, only revisions matching one or more of
        the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not generate_delta and not fileids:
        return log_rev_iterator
    return _generate_deltas(branch.repository, log_rev_iterator,
                            generate_delta, fileids, direction)


def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
                     direction):
    """Create deltas for each batch of revisions in log_rev_iterator.

    If we're only generating deltas for the sake of filtering against
    file-ids, we stop generating deltas once all file-ids reach the
    appropriate life-cycle point. If we're receiving data newest to
    oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
    """
    check_fileids = fileids is not None and len(fileids) > 0
    if check_fileids:
        fileid_set = set(fileids)
        if direction == 'reverse':
            stop_on = 'add'
        else:
            stop_on = 'remove'
    else:
        fileid_set = None
    for revs in log_rev_iterator:
        # If we were matching against fileids and we've run out,
        # there's nothing left to do
        if check_fileids and not fileid_set:
            return
        revisions = [rev[1] for rev in revs]
        new_revs = []
        if delta_type == 'full' and not check_fileids:
            deltas = repository.get_deltas_for_revisions(revisions)
            for rev, delta in zip(revs, deltas):
                new_revs.append((rev[0], rev[1], delta))
        else:
            deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
            for rev, delta in zip(revs, deltas):
                if check_fileids:
                    if delta is None or not delta.has_changed():
                        continue
                    else:
                        _update_fileids(delta, fileid_set, stop_on)
                        if delta_type is None:
                            delta = None
                        elif delta_type == 'full':
                            # If the file matches all the time, rebuilding
                            # a full delta like this in addition to a partial
                            # one could be slow. However, it's likely that
                            # most revisions won't get this far, making it
                            # faster to filter on the partial deltas and
                            # build the occasional full delta than always
                            # building full deltas and filtering those.
                            rev_id = rev[0][0]
                            delta = repository.get_revision_delta(rev_id)
                new_revs.append((rev[0], rev[1], delta))
        yield new_revs


def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove' - take file-ids out of the
        fileids set once their add or remove entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added + delta.copied:
            if item.file_id in fileids:
                fileids.remove(item.file_id)
    elif stop_on == 'remove':
        for item in delta.removed:
            if item.file_id in fileids:
                fileids.remove(item.file_id)


def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = dict(repository.iter_revisions(revision_ids))
        yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]


def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
    """Group up a single large batch into smaller ones.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    num = 9
    for batch in log_rev_iterator:
        batch = iter(batch)
        while True:
            step = [detail for _, detail in zip(range(num), batch)]
            if len(step) == 0:
                break
            yield step
            num = min(int(num * 1.5), 200)


def _get_revision_limits(branch, start_revision, end_revision):
    """Get and check revision limits.

    :param branch: The branch containing the revisions.

    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :return: (start_rev_id, end_rev_id) tuple.
    """
    start_rev_id = None
    start_revno = None
    if start_revision is not None:
        if not isinstance(start_revision, revisionspec.RevisionInfo):
            raise TypeError(start_revision)
        start_rev_id = start_revision.rev_id
        start_revno = start_revision.revno
    if start_revno is None:
        start_revno = 1

    end_rev_id = None
    end_revno = None
    if end_revision is not None:
        if not isinstance(end_revision, revisionspec.RevisionInfo):
            raise TypeError(end_revision)
        end_rev_id = end_revision.rev_id
        end_revno = end_revision.revno

    if branch.last_revision() != _mod_revision.NULL_REVISION:
        if (start_rev_id == _mod_revision.NULL_REVISION
                or end_rev_id == _mod_revision.NULL_REVISION):
            raise errors.BzrCommandError(
                gettext('Logging revision 0 is invalid.'))
        if end_revno is not None and start_revno > end_revno:
            raise errors.BzrCommandError(
                gettext("Start revision must be older than the end revision."))
    return (start_rev_id, end_rev_id)
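

# Sketch only: turning user-style revision specs into the internal
# (start_rev_id, end_rev_id) pair.  RevisionSpec.from_string() and
# in_history() come from breezy.revisionspec; the helper name is
# hypothetical.
def _example_revision_limits(branch):
    start = revisionspec.RevisionSpec.from_string('-10').in_history(branch)
    end = revisionspec.RevisionSpec.from_string('-1').in_history(branch)
    return _get_revision_limits(branch, start, end)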


def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.

    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # filtered later.
    # Also map the revisions to rev_ids, to be used in the later filtering
    # stage.
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
            or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
    if start_revno > end_revno:
        raise errors.BzrCommandError(gettext("Start revision must be older "
                                             "than the end revision."))

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    rev_nos = {}
    mainline_revs = []
    graph = branch.repository.get_graph()
    for revision_id in graph.iter_lefthand_ancestry(
            branch_last_revision, (_mod_revision.NULL_REVISION,)):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
            break
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        cur_revno -= 1
    else:
        # We walked off the edge of all revisions, so we add a 'None' marker
        mainline_revs.append(None)

    mainline_revs.reverse()

    # override the mainline to look like the revision history.
    return mainline_revs, rev_nos, start_rev_id, end_rev_id


def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                       include_merges=True):
    r"""Return the list of revision ids which touch a given file id.

    The function filters view_revisions and returns a subset.
    This includes the revisions which directly change the file id,
    and the revisions which merge these changes. So if the
    revision graph is::

    And 'C' changes a file, then both C and D will be returned. F will not be
    returned even though it brings the changes to C into the branch starting
    with E. (Note that if we were using F as the tip instead of G, then we
    would see C, D, F.)

    This will also be restricted based on a subset of the mainline.

    :param branch: The branch where we can get text revision information.

    :param file_id: Filter out revisions that do not touch file_id.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples. This is the list of revisions which will be filtered. It is
        assumed that view_revisions is in merge_sort order (i.e. newest
        revision first).

    :param include_merges: include merge revisions in the result or not

    :return: A list of (revision_id, dotted_revno, merge_depth) tuples.
    """
    # Lookup all possible text keys to determine which ones actually modified
    # the file.
    graph = branch.repository.get_file_graph()
    get_parent_map = graph.get_parent_map
    text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
    # Looking up keys in batches of 1000 can cut the time in half, as well as
    # memory consumption. GraphIndex *does* like to look for a few keys in
    # parallel, it just doesn't like looking for *lots* of keys in parallel.
    # TODO: This code needs to be re-evaluated periodically as we tune the
    #       indexing layer. We might consider passing in hints as to the known
    #       access pattern (sparse/clustered, high success rate/low success
    #       rate). This particular access is clustered with a low success rate.
    modified_text_revisions = set()
    chunk_size = 1000
    for start in range(0, len(text_keys), chunk_size):
        next_keys = text_keys[start:start + chunk_size]
        # Only keep the revision_id portion of the key
        modified_text_revisions.update(
            [k[1] for k in get_parent_map(next_keys)])
    del text_keys, next_keys

    result = []
    # Track what revisions will merge the current revision, replace entries
    # with 'None' when they have been added to result
    current_merge_stack = [None]
    for info in view_revisions:
        rev_id, revno, depth = info
        if depth == len(current_merge_stack):
            current_merge_stack.append(info)
        else:
            del current_merge_stack[depth + 1:]
            current_merge_stack[-1] = info

        if rev_id in modified_text_revisions:
            # This needs to be logged, along with the extra revisions
            for idx in range(len(current_merge_stack)):
                node = current_merge_stack[idx]
                if node is not None:
                    if include_merges or node[2] == 0:
                        result.append(node)
                        current_merge_stack[idx] = None
    return result


def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth. There may be no topological justification for this
    but it looks much nicer.
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let's reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
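

# Sketch only: how reverse_by_depth() reorders a merge-sorted list for
# 'forward' display.  The revision ids and revnos are invented.
def _example_reverse_by_depth():
    merge_sorted = [
        (b'rev-3', '3', 0),
        (b'rev-2', '2', 0),
        (b'rev-2.1.1', '2.1.1', 1),
        (b'rev-1', '1', 0),
        ]
    # Mainline entries are reversed while each merged revision stays grouped
    # with the mainline revision that brought it in, giving:
    # [(b'rev-1', '1', 0), (b'rev-2', '2', 0),
    #  (b'rev-2.1.1', '2.1.1', 1), (b'rev-3', '3', 0)]
    return reverse_by_depth(merge_sorted)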


class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None, signature=None):
        self.rev = rev
        if revno is None:
            self.revno = None
        else:
            self.revno = str(revno)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff
        self.signature = signature


class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated. The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    - supports_signatures must be True if this log formatter supports GPG
      signatures.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description::

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """

    preferred_levels = 0

    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
            non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use
        :param delta_format: the level of delta information to display
            or None to leave it to the formatter to decide
        :param levels: the number of levels to display; None or -1 to
            let the log formatter decide.
        :param show_advice: whether to show advice at the end of the
            log or not
        :param author_list_handler: callable generating a list of
            authors to display for a given revision
        """
        self.to_file = to_file
        # 'exact' stream used to show diff, it should print content 'as is'
        # and should not try to decode/encode it to unicode to avoid bug
        if to_exact_file is not None:
            self.to_exact_file = to_exact_file
        else:
            # XXX: somewhat hacky; this assumes it's a codec writer; it's
            # better for code that expects to get diffs to pass in the exact
            # file stream
            self.to_exact_file = getattr(to_file, 'stream', to_file)
        self.show_ids = show_ids
        self.show_timezone = show_timezone
        if delta_format is None:
            # Ensures backward compatibility
            delta_format = 2  # long format
        self.delta_format = delta_format
        self.levels = levels
        self._show_advice = show_advice
        self._merge_count = 0
        self._author_list_handler = author_list_handler

    def get_levels(self):
        """Get the number of levels to display or 0 for all."""
        if getattr(self, 'supports_merge_revisions', False):
            if self.levels is None or self.levels == -1:
                self.levels = self.preferred_levels
        else:
            self.levels = 1
        return self.levels

    def log_revision(self, revision):
        """Log a revision.

        :param revision: The LogRevision to be logged.
        """
        raise NotImplementedError('not implemented in abstract base')

    def show_advice(self):
        """Output user advice, if any, when the log is completed."""
        if self._show_advice and self.levels == 1 and self._merge_count > 0:
            advice_sep = self.get_advice_separator()
            if advice_sep:
                self.to_file.write(advice_sep)
            self.to_file.write(
                "Use --include-merged or -n0 to see merged revisions.\n")

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return ''

    def short_committer(self, rev):
        name, address = config.parse_username(rev.committer)
        if name:
            return name
        return address

    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of an author list handler,
        as provided by the author list registry, using the ``who``
        argument. That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)

        names = author_list_handler(rev)
        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if name:
                    names[i] = name
                else:
                    names[i] = address
        if sep is not None:
            names = sep.join(names)
        return names

    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1
            return ' [merge]'
        else:
            return ''

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))

    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            try:
                lines.extend(self._format_properties(handler(revision)))
            except Exception:
                trace.log_exception_quietly()
                trace.print_exception(sys.exc_info(), self.to_file)
        return lines

    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return self._format_properties(
                rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

        # Imported foreign revision ids always contain :
        if b":" not in rev.revision_id:
            return []

        # Revision was once imported from a foreign repository
        try:
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:
            return []

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))

    def _format_properties(self, properties):
        lines = []
        for key, value in properties.items():
            lines.append(key + ': ' + value)
        return lines

    def show_diff(self, to_file, diff, indent):
        encoding = get_terminal_encoding()
        for l in diff.rstrip().split(b'\n'):
            to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
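

# Sketch only: registering a display hook for a custom revision property,
# following the interface described in the LogFormatter docstring.  The
# 'reviewer' property and the handler name are hypothetical; note that
# custom_properties() above calls each registered handler with the Revision
# object.
def _example_show_reviewer(rev):
    reviewer = rev.properties.get('reviewer')
    if reviewer is None:
        return {}
    return {'reviewer': reviewer}

# A plugin would then register it, for example:
#     properties_handler_registry.register('example_reviewer',
#                                          _example_show_reviewer)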


# Separator between revisions in long format
_LONG_SEP = '-' * 60


class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True
    supports_signatures = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
        else:
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone)

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
                                                            rev.timezone or 0)

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = ' ' * revision.merge_depth
        lines = [_LONG_SEP]
        if revision.revno is not None:
            lines.append('revno: %s%s' % (revision.revno,
                                          self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' %
                         (revision.rev.revision_id.decode('utf-8'),))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id.decode('utf-8'),))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            lines.append('branch nick: %s' % (branch_nick,))

        lines.append('timestamp: %s' % (self.date_string(revision.rev),))

        if revision.signature is not None:
            lines.append('signature: ' + revision.signature)

        lines.append('message:')
        if not revision.rev.message:
            lines.append(' (no message)')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                lines.append(' %s' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                         show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            to_file.flush()
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'


class ShortLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = ' ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno is None or revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file

        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(sorted(revision.tags)))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (
            revno_width,
            revision.revno or "",
            self.short_author(revision.rev),
            format_date(revision.rev.timestamp,
                        revision.rev.timezone or 0,
                        self.show_timezone, date_fmt="%Y-%m-%d",
                        show_offset=False),
            tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent + offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id.decode('utf-8'),))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format == 1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, ' ')
        to_file.write('\n')
1707
class LineLogFormatter(LogFormatter):
1709
supports_merge_revisions = True
1710
preferred_levels = 1
1711
supports_tags = True
1713
def __init__(self, *args, **kwargs):
1714
super(LineLogFormatter, self).__init__(*args, **kwargs)
1715
width = terminal_width()
1716
if width is not None:
1717
# we need one extra space for terminals that wrap on last char
1719
self._max_chars = width
1721

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len - 3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
                                           self._max_chars, revision.tags,
                                           indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate the tail if needed.

        :param revno: revision number or None.
            Revision numbers count from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when it is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            out.append(self.truncate(
                self.short_author(rev), (max_chars + 3) // 4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(sorted(tags)))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
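
    # Illustrative sketch (not part of the original code): for a revision with
    # revno "3", a short author of "Jane Doe", a 2011-04-01 timestamp, two
    # parents and tag "v1.0", log_string would return something like
    #
    #   3: Jane Doe 2011-04-01 [merge] {v1.0} Fix the frobnicator
    #
    # truncated to max_chars with a trailing '...' when necessary.  All of the
    # revision details above are made-up values used only for this example.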


class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                if c.path[0] is None:
                    path = c.path[1]
                else:
                    path = c.path[0]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed + revision.delta.copied:
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (c.path[0], c.path[1]))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
            to_file.write('\n')


def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters."""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct.  'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))


log_formatter_registry = LogFormatterRegistry()


log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)
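

# Illustrative sketch (not part of the original code): a plugin can expose its
# own format through register_formatter().  'JsonLogFormatter' is a
# hypothetical name used only for this example; any LogFormatter subclass
# implementing log_revision() would do.
#
#   from breezy import log
#
#   class JsonLogFormatter(log.LogFormatter):
#       supports_merge_revisions = True
#
#       def log_revision(self, revision):
#           self.to_file.write('{"revno": "%s"}\n' % (revision.revno or ''))
#
#   log.register_formatter('json', JsonLogFormatter)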


def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(
            gettext("unknown log formatter: %r") % name)


def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')

author_list_registry.register('first', author_list_first,
                              'The first author')

author_list_registry.register('committer', author_list_committer,
                              'The committer')
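

# Illustrative sketch (not part of the original code): LogFormatter.authors()
# (see GnuChangelogLogFormatter above) looks these handlers up by name, and a
# caller can do the same directly.  'rev' below stands for any Revision
# object:
#
#   handler = author_list_registry.get('first')
#   names = handler(rev)  # a list with at most one apparent author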


def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history, comparing the old revision
    history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
                                                            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in range(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
                or len(old_rh) <= i
                or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    # TODO: It might be nice to do something like show_log
    # and show the merged entries. But since this is the
    # removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*' * 60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i + 1, 0, None)
            lf.log_revision(lr)
        to_file.write('*' * 60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx + 1,
                 end_revision=len(new_rh),
                 search=None)


def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    :return: old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    while do_new or do_old:
        if do_new:
            try:
                new_revision = next(new_iter)
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = next(old_iter)
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
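

# Illustrative sketch (made-up revision names): if the old tip O has lefthand
# history [..., C, A1, O] and the new tip N has [..., C, B1, B2, N], where C
# is the last revision the two sides share, then
#
#   get_history_change(O, N, repository)
#
# would return ([A1, O], [B1, B2, N]) - the lefthand revisions unique to each
# side, oldest first.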


def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*' * 60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*' * 60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, verbose=False, direction='forward',
                 start_revision=start_revno)


def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list, exit_stack):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param exit_stack: When the branch returned is read locked,
      an unlock call will be queued to the exit stack.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    exit_stack.enter_context(b.lock_read())
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
                                                       "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info


def _get_kind_for_file_id(tree, path, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(path)
    else:
        return None


properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available


def _bugs_properties_handler(revision):
    fixed_bug_urls = []
    related_bug_urls = []
    for bug_url, status in revision.iter_bugs():
        if status == 'fixed':
            fixed_bug_urls.append(bug_url)
        elif status == 'related':
            related_bug_urls.append(bug_url)
    ret = {}
    if fixed_bug_urls:
        text = ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))
        ret[text] = ' '.join(fixed_bug_urls)
    if related_bug_urls:
        text = ngettext('related bug', 'related bugs',
                        len(related_bug_urls))
        ret[text] = ' '.join(related_bug_urls)
    return ret


properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
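

# Illustrative sketch (hypothetical handler, not part of this module): plugins
# may register further handlers in the same way.  Each handler takes a
# Revision and returns a dict of extra "name: value" properties for the
# formatters' show_properties() to print, e.g.:
#
#   def _ticket_properties_handler(revision):
#       ticket = revision.properties.get('ticket')
#       if ticket:
#           return {'ticket': ticket}
#       return {}
#
#   properties_handler_registry.register('ticket_properties_handler',
#                                        _ticket_properties_handler)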


# Adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is this won't be considered a stable api.
log_adapters = [
    # core log logic
    _make_batch_filter,
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show