# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Code to show logs of changes.

Various flavors of log can be produced:

* for one file, or the whole tree, and (not done yet) for
  files in a given directory

* in "verbose" mode with a description of what changed from one
  version to the next

* with file-ids and revision-ids shown

Logs are actually written out through an abstract LogFormatter
interface, which allows for different preferred formats. Plugins can
register formats too.

Logs can be produced in either forward (oldest->newest) or reverse
(newest->oldest) order.

Logs can be filtered to show only revisions matching a particular
search string, or within a particular range of revisions. The range
can be given as date/times, which are reduced to revisions before
or after that date/time.

In verbose mode we show a summary of what changed in each particular
revision. Note that this is the delta for changes in that revision
relative to its left-most parent, not the delta relative to the last
logged revision. So for example if you ask for a verbose log of
changes touching hello.c you will get a list of those revisions also
listing other things that were changed in the same revision, but not
all the changes since the previous revision that touched hello.c.
"""
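
# Illustrative usage sketch (not part of the original module): a caller
# builds a request dict, constructs a formatter and hands both to Logger.
# The branch location '.' and the variable names are assumptions made for
# this example only.
#
#     import sys
#     from breezy import branch as _mod_branch
#     from breezy import log
#
#     b = _mod_branch.Branch.open('.')
#     lf = log.log_formatter('long', to_file=sys.stdout)
#     rqst = log.make_log_request_dict(limit=10)
#     log.Logger(b, rqst).show(lf)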

from __future__ import absolute_import

import itertools
import re
import sys
from io import BytesIO
from warnings import (
    warn,
    )

from .lazy_import import lazy_import
lazy_import(globals(), """

from breezy import (
    config,
    diff,
    foreign,
    lazy_regex,
    revision as _mod_revision,
    )
from breezy.i18n import gettext, ngettext
""")

from . import (
    errors,
    registry,
    revisionspec,
    trace,
    )
from .osutils import (
    format_date,
    format_date_with_offset_in_original_timezone,
    get_diff_header_encoding,
    get_terminal_encoding,
    terminal_width,
    )
from .tree import find_previous_path


def find_touching_revisions(repository, last_revision, last_tree, last_path):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    TODO: Perhaps some way to limit this to only particular revisions,
    or to traverse a non-mainline set of revisions?
    """
    last_verifier = last_tree.get_file_verifier(last_path)
    graph = repository.get_graph()
    history = list(graph.iter_lefthand_ancestry(last_revision, []))
    revno = len(history)
    for revision_id in history:
        this_tree = repository.revision_tree(revision_id)
        this_path = find_previous_path(last_tree, this_tree, last_path)

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?
        if this_path is not None and last_path is None:
            yield revno, revision_id, "deleted " + this_path
            this_verifier = this_tree.get_file_verifier(this_path)
        elif this_path is None and last_path is not None:
            yield revno, revision_id, "added " + last_path
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
            this_verifier = this_tree.get_file_verifier(this_path)
        else:
            this_verifier = this_tree.get_file_verifier(this_path)
            if (this_verifier != last_verifier):
                yield revno, revision_id, "modified " + this_path

        last_verifier = this_verifier
        last_path = this_path
        last_tree = this_tree
        if last_path is None:
            return
        revno -= 1


def show_log(branch,
             lf,
             verbose=False,
             direction='reverse',
             start_revision=None,
             end_revision=None,
             search=None,
             limit=None,
             show_diff=False,
             match=None):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, e.g. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.

    :param verbose: If True show added/changed/deleted/renamed files.

    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.

    :param start_revision: If not None, only show revisions >= start_revision

    :param end_revision: If not None, only show revisions <= end_revision

    :param search: If not None, only show revisions with matching commit
        messages.

    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.

    :param show_diff: If True, output a diff after each revision.

    :param match: Dictionary of search lists to use when matching revision
        properties.
    """
    if verbose:
        delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        diff_type = 'full'
    else:
        diff_type = None

    if isinstance(start_revision, int):
        try:
            start_revision = revisionspec.RevisionInfo(branch, start_revision)
        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
            raise errors.InvalidRevisionNumber(start_revision)

    if isinstance(end_revision, int):
        try:
            end_revision = revisionspec.RevisionInfo(branch, end_revision)
        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
            raise errors.InvalidRevisionNumber(end_revision)

    if end_revision is not None and end_revision.revno == 0:
        raise errors.InvalidRevisionNumber(end_revision.revno)

    # Build the request and execute it
    rqst = make_log_request_dict(
        direction=direction,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
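
# Migration sketch (illustration only, not from the original source): the
# backwards-compatible helper above maps directly onto the newer API, so
#
#     show_log(branch, lf, direction='forward', limit=5)
#
# is roughly equivalent to
#
#     rqst = make_log_request_dict(direction='forward', limit=5)
#     Logger(branch, rqst).show(lf)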


# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }


def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False, omit_merges=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
        'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
        affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
        revisions >= start_revision

    :param end_revision: If not None, only generate
        revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions, all revisions
        are shown if None or 0.

    :param message_search: If not None, only include revisions with
        matching commit messages

    :param levels: the number of levels of revisions to
        generate; 1 for just the mainline; 0 for all levels, or None for
        a sensible default.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
        'full' means generate the complete delta - adds/deletes/modifies/etc;
        'partial' means filter the delta using specific_fileids;
        None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
        'full' means generate the complete diff - adds/deletes/modifies/etc;
        'partial' means filter the diff using specific_fileids;
        None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
        algorithm used for matching specific_fileids. This parameter
        may be removed in the future so breezy client code should NOT
        use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
        range operator or as a graph difference.

    :param signature: show digital signature information

    :param match: Dictionary of list of search strings to use when filtering
        revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
        the empty string to match any of the preceding properties.

    :param omit_merges: If True, commits with more than one parent are
        omitted.
    """
    # Take care of old style message_search parameter
    if message_search:
        if match:
            if 'message' in match:
                match['message'].append(message_search)
            else:
                match['message'] = [message_search]
        else:
            match = {'message': [message_search]}
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'match': match,
        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
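
# Example request (sketch, not from the original source): the ``match``
# parameter combines per-property search lists, e.g. keep revisions whose
# commit message matches 'fix' and whose author matches 'jane'; an empty
# string key would match against any of the supported properties.
#
#     rqst = make_log_request_dict(
#         levels=0,
#         match={'message': ['fix'], 'author': ['jane']})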


def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result


def format_signature_validity(rev_id, branch):
    """Get the signature validity.

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log
    """
    from breezy import gpg

    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"


class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)


class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Create a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
            See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)

    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)

        with self.branch.lock_read():
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
            # user didn't specify levels, use whatever the LF can handle:
            rqst['levels'] = lf.get_levels()

        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
        if not getattr(lf, 'supports_signatures', False):
            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        try:
            for lr in generator.iter_log_revisions():
                lf.log_revision(lr)
        except errors.GhostRevisionUnusableHere:
            raise errors.BzrCommandError(
                gettext('Further revision history missing.'))
        lf.show_advice()

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)


class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        show_signature = rqst.get('signature')
        omit_merges = rqst.get('omit_merges')
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if (levels != 0 and merge_depth is not None and
                        merge_depth >= levels):
                    continue
                if omit_merges and len(rev.parent_ids) > 1:
                    continue
                if rev is None:
                    raise errors.GhostRevisionUnusableHere(rev_id)
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                if show_signature:
                    signature = format_signature_validity(rev_id, self.branch)
                else:
                    signature = None
                yield LogRevision(
                    rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff, signature)
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return

    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = BytesIO()
        path_encoding = get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
                             new_label='', path_encoding=path_encoding)
        return s.getvalue()

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise errors.BzrError(
                    "illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
            rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
                                     rqst.get('delta_type'), rqst.get('match'),
                                     file_ids=rqst.get('specific_fileids'),
                                     direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0],
            view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
                                     rqst.get('delta_type'), rqst.get('match'))


def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
                         ):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
        a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(gettext(
            '--exclude-common-ancestry requires two different revisions'))
    if direction not in ('reverse', 'forward'):
        raise ValueError(gettext('invalid direction %r') % direction)
    br_rev_id = branch.last_revision()
    if br_rev_id == _mod_revision.NULL_REVISION:
        # Nothing to show
        return []

    if (end_rev_id and start_rev_id == end_rev_id
            and (not generate_merge_revisions
                 or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        return _generate_one_revision(branch, end_rev_id, br_rev_id,
                                      branch.revno())
    if not generate_merge_revisions:
        try:
            # If we only want to see linear revisions, we can iterate ...
            iter_revs = _linear_view_revisions(
                branch, start_rev_id, end_rev_id,
                exclude_common_ancestry=exclude_common_ancestry)
            # If a start limit was given and it's not obviously an
            # ancestor of the end limit, check it before outputting anything
            if (direction == 'forward'
                    or (start_rev_id and not _is_obvious_ancestor(
                        branch, start_rev_id, end_rev_id))):
                iter_revs = list(iter_revs)
            if direction == 'forward':
                iter_revs = reversed(iter_revs)
            return iter_revs
        except _StartNotLinearAncestor:
            # Switch to the slower implementation that may be able to find a
            # non-obvious ancestor out of the left-hand history.
            pass
    iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                        direction, delayed_graph_generation,
                                        exclude_common_ancestry)
    if direction == 'forward':
        iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
    return iter_revs


def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        # It's the tip
        return [(br_rev_id, br_revno, 0)]
    else:
        revno_str = _compute_revno_str(branch, rev_id)
        return [(rev_id, revno_str, 0)]


def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    # The above is only true for old formats (<= 0.92); for newer formats,
    # only a couple of seconds should be needed to load the whole graph, and
    # the other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept to show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                            and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    # adjust end_rev_id.
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError(gettext('Start revision not found in'
                                                 ' history of end revision.'))

    # We exited the loop above because we encountered a revision with merges;
    # from this revision on, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = itertools.chain(iter(initial_revisions),
                                     _graph_view_revisions(branch, start_rev_id, end_rev_id,
                                                           rebase_initial_depths=(
                                                               direction == 'reverse'),
                                                           exclude_common_ancestry=exclude_common_ancestry))
    return view_revisions


def _has_merges(branch, rev_id):
    """Does a revision have multiple parents or not?"""
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _compute_revno_str(branch, rev_id):
    """Compute the revno string from a rev_id.

    :return: The revno string, or None if the revision is not in the supplied
        branch.
    """
    try:
        revno = branch.revision_id_to_dotted_revno(rev_id)
    except errors.NoSuchRevision:
        # The revision must be outside of this branch
        return None
    else:
        return '.'.join(str(n) for n in revno)


def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        try:
            start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
            end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        except errors.NoSuchRevision:
            # one or both is not in the branch; not obvious
            return False
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
        elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
              start_dotted[0:1] == end_dotted[0:1]):
            # both on same development line
            return start_dotted[2] <= end_dotted[2]
        else:
            # not obvious
            return False
    # if either start or end is not specified then we use either the first or
    # the last revision and *they* are obvious ancestors.
    return True


def _linear_view_revisions(branch, start_rev_id, end_rev_id,
                           exclude_common_ancestry=False):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param exclude_common_ancestry: Whether the start_rev_id should be part of
        the iterated revisions.
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
        dotted_revno will be None for ghosts
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
        is not found walking the left-hand history
    """
    repo = branch.repository
    graph = repo.get_graph()
    if start_rev_id is None and end_rev_id is None:
        if branch._format.stores_revno() or \
                config.GlobalStack().get('calculate_revnos'):
            try:
                br_revno, br_rev_id = branch.last_revision_info()
            except errors.GhostRevisionsHaveNoRevno:
                br_rev_id = branch.last_revision()
                cur_revno = None
            else:
                cur_revno = br_revno
        else:
            br_rev_id = branch.last_revision()
            cur_revno = None
        graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
                                                  (_mod_revision.NULL_REVISION,))
        while True:
            try:
                revision_id = next(graph_iter)
            except errors.RevisionNotPresent as e:
                # Oops, a ghost.
                yield e.revision_id, None, None
                break
            except StopIteration:
                break
            else:
                yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
                if cur_revno is not None:
                    cur_revno -= 1
    else:
        br_rev_id = branch.last_revision()
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
                                                  (_mod_revision.NULL_REVISION,))
        while True:
            try:
                revision_id = next(graph_iter)
            except StopIteration:
                break
            except errors.RevisionNotPresent as e:
                # Oops, a ghost.
                yield e.revision_id, None, None
                break
            else:
                revno_str = _compute_revno_str(branch, revision_id)
                if not found_start and revision_id == start_rev_id:
                    if not exclude_common_ancestry:
                        yield revision_id, revno_str, 0
                    found_start = True
                    break
                else:
                    yield revision_id, revno_str, 0
        if not found_start:
            raise _StartNotLinearAncestor()


def _graph_view_revisions(branch, start_rev_id, end_rev_id,
                          rebase_initial_depths=True,
                          exclude_common_ancestry=False):
    """Calculate revisions to view including merges, newest to oldest.

    :param branch: the branch
    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param rebase_initial_depths: should depths be rebased until a mainline
        revision is found?
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    """
    if exclude_common_ancestry:
        stop_rule = 'with-merges-without-common-ancestry'
    else:
        stop_rule = 'with-merges'
    view_revisions = branch.iter_merge_sorted_revisions(
        start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
        stop_rule=stop_rule)
    if not rebase_initial_depths:
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            yield rev_id, '.'.join(map(str, revno)), merge_depth
    else:
        # We're following a development line starting at a merged revision.
        # We need to adjust depths down by the initial depth until we find
        # a depth less than it. Then we use that depth as the adjustment.
        # If and when we reach the mainline, depth adjustment ends.
        depth_adjustment = None
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            if depth_adjustment is None:
                depth_adjustment = merge_depth
            if depth_adjustment:
                if merge_depth < depth_adjustment:
                    # From now on we reduce the depth adjustment, this can be
                    # surprising for users. The alternative requires two passes
                    # which breaks the fast display of the first revision
                    # though.
                    depth_adjustment = merge_depth
                merge_depth -= depth_adjustment
            yield rev_id, '.'.join(map(str, revno)), merge_depth


def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision has a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r, n, d in view_revisions])
        if min_depth != 0:
            view_revisions = [(r, n, d - min_depth)
                              for r, n, d in view_revisions]
    return view_revisions
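
# Worked example (illustration only): for view_revisions
#     [('rev-c', '1.2.3', 1), ('rev-b', '1.2.2', 1), ('rev-a', '1.2.1', 2)]
# neither end has depth 0, so the minimum depth (1) is subtracted from every
# tuple, giving
#     [('rev-c', '1.2.3', 0), ('rev-b', '1.2.2', 0), ('rev-a', '1.2.1', 1)]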


def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                          file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
        Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non-empty, only revisions matching one or more of
        the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if isinstance(view_revisions, list):
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            log_rev_iterator = adapter(
                branch, generate_delta, search, log_rev_iterator, file_ids,
                direction)
        else:
            log_rev_iterator = adapter(
                branch, generate_delta, search, log_rev_iterator)
    return log_rev_iterator
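
# Note on the adapter pipeline (sketch): ``log_adapters`` is defined elsewhere
# in this module; the adapters implemented in this file (_make_batch_filter,
# _make_delta_filter, _make_revision_objects and _make_search_filter) all
# consume and return an iterator over lists of
# ((rev_id, revno, merge_depth), rev, delta) tuples.  A custom adapter only
# needs to honour that shape; ``keep`` below is a hypothetical predicate:
#
#     def _my_adapter(branch, generate_delta, search, log_rev_iterator):
#         for revs in log_rev_iterator:
#             yield [rev for rev in revs if keep(rev)]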


def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param match: A dictionary with properties as keys and lists of strings
        as values. To match, a revision may match any of the supplied strings
        within a single property but must match at least one string for each
        property.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not match:
        return log_rev_iterator
    # Use lazy_compile so mapping to InvalidPattern error occurs.
    searchRE = [(k, [lazy_regex.lazy_compile(x, re.IGNORECASE) for x in v])
                for k, v in match.items()]
    return _filter_re(searchRE, log_rev_iterator)


def _filter_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs


def _match_filter(searchRE, rev):
    strings = {
        'message': (rev.message,),
        'committer': (rev.committer,),
        'author': (rev.get_apparent_authors()),
        'bugs': list(rev.iter_bugs())
        }
    strings[''] = [item for inner_list in strings.values()
                   for item in inner_list]
    for k, v in searchRE:
        if k in strings and not _match_any_filter(strings[k], v):
            return False
    return True


def _match_any_filter(strings, res):
    return any(r.search(s) for r in res for s in strings)
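
# Example of the matching semantics above (illustration only): with
#     match = {'message': ['fix', 'bug'], 'author': ['jane']}
# a revision is kept when its message matches 'fix' OR 'bug' (any string
# within a property suffices) AND its author matches 'jane' (every supplied
# property must match).  A '' key is checked against message, committer,
# author and bugs combined.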


def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    """Add revision deltas to a log iterator if needed.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
        Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :param fileids: If non-empty, only revisions matching one or more of
        the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not generate_delta and not fileids:
        return log_rev_iterator
    return _generate_deltas(branch.repository, log_rev_iterator,
                            generate_delta, fileids, direction)


def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
                     direction):
    """Create deltas for each batch of revisions in log_rev_iterator.

    If we're only generating deltas for the sake of filtering against
    file-ids, we stop generating deltas once all file-ids reach the
    appropriate life-cycle point. If we're receiving data newest to
    oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
    """
    check_fileids = fileids is not None and len(fileids) > 0
    if check_fileids:
        fileid_set = set(fileids)
        if direction == 'reverse':
            stop_on = 'add'
        else:
            stop_on = 'remove'
    else:
        fileid_set = None
    for revs in log_rev_iterator:
        # If we were matching against fileids and we've run out,
        # there's nothing left to do
        if check_fileids and not fileid_set:
            return
        revisions = [rev[1] for rev in revs]
        new_revs = []
        if delta_type == 'full' and not check_fileids:
            deltas = repository.get_deltas_for_revisions(revisions)
            for rev, delta in zip(revs, deltas):
                new_revs.append((rev[0], rev[1], delta))
        else:
            deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
            for rev, delta in zip(revs, deltas):
                if check_fileids:
                    if delta is None or not delta.has_changed():
                        continue
                    else:
                        _update_fileids(delta, fileid_set, stop_on)
                        if delta_type is None:
                            continue
                        elif delta_type == 'full':
                            # If the file matches all the time, rebuilding
                            # a full delta like this in addition to a partial
                            # one could be slow. However, it's likely that
                            # most revisions won't get this far, making it
                            # faster to filter on the partial deltas and
                            # build the occasional full delta than always
                            # building full deltas and filtering those.
                            rev_id = rev[0][0]
                            delta = repository.get_revision_delta(rev_id)
                new_revs.append((rev[0], rev[1], delta))
        yield new_revs


def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove' - take file-ids out of the
        fileids set once their add or remove entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added + delta.copied:
            if item.file_id in fileids:
                fileids.remove(item.file_id)
    elif stop_on == 'remove':
        for item in delta.removed:
            if item.file_id in fileids:
                fileids.remove(item.file_id)


def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = dict(repository.iter_revisions(revision_ids))
        yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]


def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
    """Group up a single large batch into smaller ones.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    num = 9
    for batch in log_rev_iterator:
        batch = iter(batch)
        while True:
            step = [detail for _, detail in zip(range(num), batch)]
            if len(step) == 0:
                break
            yield step
            num = min(int(num * 1.5), 200)


def _get_revision_limits(branch, start_revision, end_revision):
    """Get and check revision limits.

    :param branch: The branch containing the revisions.

    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :return: (start_rev_id, end_rev_id) tuple.
    """
    start_rev_id = None
    start_revno = None
    if start_revision is not None:
        if not isinstance(start_revision, revisionspec.RevisionInfo):
            raise TypeError(start_revision)
        start_rev_id = start_revision.rev_id
        start_revno = start_revision.revno
    if start_revno is None:
        start_revno = 1

    end_rev_id = None
    end_revno = None
    if end_revision is not None:
        if not isinstance(end_revision, revisionspec.RevisionInfo):
            raise TypeError(end_revision)
        end_rev_id = end_revision.rev_id
        end_revno = end_revision.revno

    if branch.last_revision() != _mod_revision.NULL_REVISION:
        if (start_rev_id == _mod_revision.NULL_REVISION
                or end_rev_id == _mod_revision.NULL_REVISION):
            raise errors.BzrCommandError(
                gettext('Logging revision 0 is invalid.'))
        if end_revno is not None and start_revno > end_revno:
            raise errors.BzrCommandError(
                gettext("Start revision must be older than the end revision."))
    return (start_rev_id, end_rev_id)


def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.

    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # filtered later.
    # Also map the revisions to rev_ids, to be used in the later filtering
    # stage.
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
            or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
    if start_revno > end_revno:
        raise errors.BzrCommandError(gettext("Start revision must be older "
                                             "than the end revision."))

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    rev_nos = {}
    mainline_revs = []
    graph = branch.repository.get_graph()
    for revision_id in graph.iter_lefthand_ancestry(
            branch_last_revision, (_mod_revision.NULL_REVISION,)):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
            break
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        cur_revno -= 1
    else:
        # We walked off the edge of all revisions, so we add a 'None' marker
        mainline_revs.append(None)

    mainline_revs.reverse()

    # override the mainline to look like the revision history.
    return mainline_revs, rev_nos, start_rev_id, end_rev_id


def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                       include_merges=True):
    r"""Return the list of revision ids which touch a given file id.

    The function filters view_revisions and returns a subset.
    This includes the revisions which directly change the file id,
    and the revisions which merge these changes. So if the
    revision graph is::

        A-.
        |\ \
        B C E
        |/ /
        D |
        |\|
        | F
        |/
        G

    And 'C' changes a file, then both C and D will be returned. F will not be
    returned even though it brings the changes to C into the branch starting
    with E. (Note that if we were using F as the tip instead of G, then we
    would see C, D and F.)

    This will also be restricted based on a subset of the mainline.

    :param branch: The branch where we can get text revision information.

    :param file_id: Filter out revisions that do not touch file_id.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples. This is the list of revisions which will be filtered. It is
        assumed that view_revisions is in merge_sort order (i.e. newest
        revision first).

    :param include_merges: include merge revisions in the result or not

    :return: A list of (revision_id, dotted_revno, merge_depth) tuples.
    """
    # Lookup all possible text keys to determine which ones actually modified
    # the file.
    graph = branch.repository.get_file_graph()
    get_parent_map = graph.get_parent_map
    text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
    next_keys = None
    # Looking up keys in batches of 1000 can cut the time in half, as well as
    # memory consumption. GraphIndex *does* like to look for a few keys in
    # parallel, it just doesn't like looking for *lots* of keys in parallel.
    # TODO: This code needs to be re-evaluated periodically as we tune the
    #       indexing layer. We might consider passing in hints as to the known
    #       access pattern (sparse/clustered, high success rate/low success
    #       rate). This particular access is clustered with a low success rate.
    modified_text_revisions = set()
    chunk_size = 1000
    for start in range(0, len(text_keys), chunk_size):
        next_keys = text_keys[start:start + chunk_size]
        # Only keep the revision_id portion of the key
        modified_text_revisions.update(
            [k[1] for k in get_parent_map(next_keys)])
    del text_keys, next_keys

    result = []
    # Track what revisions will merge the current revision, replace entries
    # with 'None' when they have been added to result
    current_merge_stack = [None]
    for info in view_revisions:
        rev_id, revno, depth = info
        if depth == len(current_merge_stack):
            current_merge_stack.append(info)
        else:
            del current_merge_stack[depth + 1:]
            current_merge_stack[-1] = info

        if rev_id in modified_text_revisions:
            # This needs to be logged, along with the extra revisions
            for idx in range(len(current_merge_stack)):
                node = current_merge_stack[idx]
                if node is not None:
                    if include_merges or node[2] == 0:
                        result.append(node)
                        current_merge_stack[idx] = None
    return result


def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth. There may be no topological justification for this,
    but it looks much nicer.
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let's reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
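
# Worked example (illustration only): for the merge-sorted input
#     [('m2', '2', 0), ('m1', '1', 0), ('n1', '1.1.1', 1)]
# the merged revision 'n1' is grouped with its mainline revision 'm1' and the
# groups are reversed, yielding
#     [('m1', '1', 0), ('n1', '1.1.1', 1), ('m2', '2', 0)]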


class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None, signature=None):
        self.rev = rev
        if revno is None:
            self.revno = None
        else:
            self.revno = str(revno)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff
        self.signature = signature


class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated. The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    - supports_signatures must be True if this log formatter supports GPG
      signatures.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description::

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """

    preferred_levels = 0

    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
            non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use
        :param delta_format: the level of delta information to display
            or None to leave it to the formatter to decide
        :param levels: the number of levels to display; None or -1 to
            let the log formatter decide.
        :param show_advice: whether to show advice at the end of the
            log or not
        :param author_list_handler: callable generating a list of
            authors to display for a given revision
        """
        self.to_file = to_file
        # 'exact' stream used to show diff, it should print content 'as is'
        # and should not try to decode/encode it to unicode to avoid bug
        if to_exact_file is not None:
            self.to_exact_file = to_exact_file
        else:
            # XXX: somewhat hacky; this assumes it's a codec writer; it's
            # better for code that expects to get diffs to pass in the exact
            # file stream
            self.to_exact_file = getattr(to_file, 'stream', to_file)
        self.show_ids = show_ids
        self.show_timezone = show_timezone
        if delta_format is None:
            # Ensures backward compatibility
            delta_format = 2  # long format
        self.delta_format = delta_format
        self.levels = levels
        self._show_advice = show_advice
        self._merge_count = 0
        self._author_list_handler = author_list_handler

    def get_levels(self):
        """Get the number of levels to display or 0 for all."""
        if getattr(self, 'supports_merge_revisions', False):
            if self.levels is None or self.levels == -1:
                self.levels = self.preferred_levels
        else:
            self.levels = 1
        return self.levels

    def log_revision(self, revision):
        """Log a revision.

        :param revision: The LogRevision to be logged.
        """
        raise NotImplementedError('not implemented in abstract base')

    def show_advice(self):
        """Output user advice, if any, when the log is completed."""
        if self._show_advice and self.levels == 1 and self._merge_count > 0:
            advice_sep = self.get_advice_separator()
            if advice_sep:
                self.to_file.write(advice_sep)
            self.to_file.write(
                "Use --include-merged or -n0 to see merged revisions.\n")

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return ''

    def short_committer(self, rev):
        name, address = config.parse_username(rev.committer)
        if name:
            return name
        return address

    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of an author list handler,
        as provided by the author list registry, using the ``who``
        argument. That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)

        names = author_list_handler(rev)

        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if name:
                    names[i] = name
                else:
                    names[i] = address

        if sep is not None:
            names = sep.join(names)

        return names

    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1
            return ' [merge]'
        else:
            return ''

    def show_properties(self, revision, indent):
        """Display the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))

    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            try:
                lines.extend(self._format_properties(handler(revision)))
            except Exception:
                trace.log_exception_quietly()
                trace.print_exception(sys.exc_info(), self.to_file)
        return lines

    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return self._format_properties(
                rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

        # Imported foreign revision ids always contain :
        if b":" not in rev.revision_id:
            return []

        # Revision was once imported from a foreign repository
        try:
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:
            return []

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))

    def _format_properties(self, properties):
        lines = []
        for key, value in properties.items():
            lines.append(key + ': ' + value)
        return lines

    def show_diff(self, to_file, diff, indent):
        encoding = get_terminal_encoding()
        for l in diff.rstrip().split(b'\n'):
            to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
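
# Sketch of a plugin-provided properties handler, following the interface
# described in the LogFormatter docstring above; the handler name and the
# 'reviewer' property are assumptions made for this example:
#
#     def my_show_properties(properties_dict):
#         reviewer = properties_dict.get('reviewer')
#         return {'reviewer': reviewer} if reviewer else {}
#
#     properties_handler_registry.register('my_plugin_properties',
#                                          my_show_properties)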


# Separator between revisions in long format
_LONG_SEP = '-' * 60


class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True
    supports_signatures = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
        else:
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone)

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
                                                            rev.timezone or 0)

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = ' ' * revision.merge_depth
        lines = [_LONG_SEP]
        if revision.revno is not None:
            lines.append('revno: %s%s' % (revision.revno,
                                          self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' %
                         (revision.rev.revision_id.decode('utf-8'),))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id.decode('utf-8'),))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            lines.append('branch nick: %s' % (branch_nick,))

        lines.append('timestamp: %s' % (self.date_string(revision.rev),))

        if revision.signature is not None:
            lines.append('signature: ' + revision.signature)

        lines.append('message:')
        if not revision.rev.message:
            lines.append(' (no message)')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                lines.append(' %s' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                         show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            to_file.flush()
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'


class ShortLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = ' ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno is None or revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file

        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(sorted(revision.tags)))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                                                     revision.revno or "", self.short_author(
                                                         revision.rev),
                                                     format_date(revision.rev.timestamp,
                                                                 revision.rev.timezone or 0,
                                                                 self.show_timezone, date_fmt="%Y-%m-%d",
                                                                 show_offset=False),
                                                     tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent + offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id.decode('utf-8'),))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format == 1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, ' ')
        to_file.write('\n')


class LineLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len - 3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = ' ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
                                           self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string.

        :param revno: revision number or None.
            Revision numbers count from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when it is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            out.append(self.truncate(
                self.short_author(rev), (max_chars + 3) // 4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(sorted(tags)))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
1777
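# Illustrative sketch: log_string() joins "<revno>: <author> <date> [merge]
# {tags} <summary>" with single spaces and truncates the result to max_chars,
# so a 'line' log entry looks roughly like
# "3: John Doe 2011-01-01 {1.0} Fix the frobnicator".

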
class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                if c.path[0] is None:
                    path = c.path[1]
                else:
                    path = c.path[0]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed + revision.delta.copied:
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (c.path[0], c.path[1]))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
            to_file.write('\n')


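# Illustrative sketch: a gnu-changelog entry is written roughly as
#
#   2011-01-01  John Doe  <john@example.com>
#
#           * hello.c:
#           Fix the frobnicator.
#
# i.e. a double-spaced date/author header, the touched paths, then the commit
# message, with paths and message lines tab-indented as in log_revision()
# above.

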
def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters."""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct.  'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))


log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)


def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    :param name: Name of the formatter to construct; currently 'long',
        'short' and 'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(
            gettext("unknown log formatter: %r") % name)


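# Example sketch: how a plugin might register its own formatter and construct
# it by name, using register_formatter() and log_formatter() above. The name
# 'example-line' and the subclass are hypothetical.
def _example_register_and_make_formatter(to_file):
    class ExampleLineLogFormatter(LineLogFormatter):
        """Hypothetical formatter, used only to illustrate registration."""

    register_formatter('example-line', ExampleLineLogFormatter)
    # Construct it the same way built-in names are constructed.
    return log_formatter('example-line', to_file=to_file,
                         show_timezone='original')

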
def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')
author_list_registry.register('first', author_list_first,
                              'The first author')
author_list_registry.register('committer', author_list_committer,
                              'The committer')


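# Example sketch: a plugin could register an additional author-list handler in
# the same way; the 'example-last' name and handler below are hypothetical.
def _example_register_author_list_handler():
    def author_list_last(rev):
        # Like 'first', but returning the last apparent author (if any).
        return rev.get_apparent_authors()[-1:]

    author_list_registry.register('example-last', author_list_last,
                                  'The last author')

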
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history, comparing the old revision
    history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be
        used
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
                                                            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in range(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
                or len(old_rh) <= i
                or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    # TODO: It might be nice to do something like show_log
    # and show the merged entries. But since this is the
    # removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*' * 60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i + 1, 0, None)
            lf.log_revision(lr)
        to_file.write('*' * 60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx + 1,
                 end_revision=len(new_rh))


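# Example sketch (hypothetical values): comparing revision-history lists
# captured before and after an operation such as a pull:
#
#     show_changed_revisions(branch, old_history_ids, new_history_ids,
#                            to_file=sys.stdout, log_format='short')

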
def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    :return: old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    while do_new or do_old:
        # Walk both lefthand ancestries in lock step until one side reaches a
        # revision already seen on the other side.
        if do_new:
            try:
                new_revision = next(new_iter)
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = next(old_iter)
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history


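# Example sketch (hypothetical helper): using get_history_change() to find
# which lefthand revisions were removed and added when a branch tip moved
# from old_tip to the current tip.
def _example_uncommon_history(branch, old_tip):
    old_history, new_history = get_history_change(
        old_tip, branch.last_revision(), branch.repository)
    # old_history: lefthand revisions only reachable from old_tip (removed);
    # new_history: lefthand revisions only reachable from the new tip (added).
    return old_history, new_history

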
def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*' * 60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*' * 60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, verbose=False, direction='forward',
                 start_revision=start_revno)


def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list, exit_stack):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
        the first of these can be a branch location or a file path,
        the remainder must be file paths
    :param exit_stack: When the branch returned is read locked,
        an unlock call will be queued to the exit stack.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
        info_list is a list of (relative_path, file_id, kind) tuples where
        kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
        branch will be read-locked.
    """
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    exit_stack.enter_context(b.lock_read())
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
                                                       "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info


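# Example sketch (hypothetical helper): calling _get_info_for_log_files() with
# a contextlib.ExitStack so the read lock taken on the branch is released when
# the stack unwinds.
def _example_info_for_log_files(revisionspec_list, file_list):
    from contextlib import ExitStack
    with ExitStack() as exit_stack:
        b, info_list, start_rev_info, end_rev_info = _get_info_for_log_files(
            revisionspec_list, file_list, exit_stack)
        # info_list holds (relative_path, file_id, kind) tuples for each path.
        return [info[0] for info in info_list]

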
def _get_kind_for_file_id(tree, path, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(path)
    else:
        return None


properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available


def _bugs_properties_handler(revision):
    fixed_bug_urls = []
    related_bug_urls = []
    for bug_url, status in revision.iter_bugs():
        if status == 'fixed':
            fixed_bug_urls.append(bug_url)
        elif status == 'related':
            related_bug_urls.append(bug_url)
    ret = {}
    if fixed_bug_urls:
        text = ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))
        ret[text] = ' '.join(fixed_bug_urls)
    if related_bug_urls:
        text = ngettext('related bug', 'related bugs',
                        len(related_bug_urls))
        ret[text] = ' '.join(related_bug_urls)
    return ret


properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)


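# Example sketch: a plugin could register another handler in the same way to
# surface a custom revision property in the log output. The handler below and
# the 'example' property name are hypothetical; a real plugin would also call
# properties_handler_registry.register() for it, as done above for bugs.
def _example_properties_handler(revision):
    ret = {}
    value = revision.properties.get('example')
    if value is not None:
        ret['example'] = value
    return ret

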
# Adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is this won't be considered a stable api.
log_adapters = [
    # core log logic
    _make_batch_filter,
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show