# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Code to show logs of changes.

Various flavors of log can be produced:

* for one file, or the whole tree, and (not done yet) for
  files in a given directory

* in "verbose" mode with a description of what changed from one
  version to the next

* with file-ids and revision-ids shown

Logs are actually written out through an abstract LogFormatter
interface, which allows for different preferred formats.  Plugins can
register formats too.

Logs can be produced in either forward (oldest->newest) or reverse
(newest->oldest) order.

Logs can be filtered to show only revisions matching a particular
search string, or within a particular range of revisions.  The range
can be given as date/times, which are reduced to revisions before or
at that time.

In verbose mode we show a summary of what changed in each particular
revision.  Note that this is the delta for changes in that revision
relative to its left-most parent, not the delta relative to the last
logged revision.  So for example if you ask for a verbose log of
changes touching hello.c you will get a list of those revisions also
listing other things that were changed in the same revision, but not
all the changes since the previous revision that touched hello.c.
"""

from __future__ import absolute_import

from warnings import (
    warn,
    )

from .lazy_import import lazy_import
lazy_import(globals(), """
import itertools
import re
import sys

from breezy import (
    config,
    diff,
    foreign,
    lazy_regex,
    revision as _mod_revision,
    )
from breezy.i18n import gettext, ngettext
""")

from .osutils import (
    format_date,
    format_date_with_offset_in_original_timezone,
    get_diff_header_encoding,
    get_terminal_encoding,
    terminal_width,
    )
from .tree import find_previous_path


def find_touching_revisions(repository, last_revision, last_tree, last_path):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    TODO: Perhaps some way to limit this to only particular revisions,
    or to traverse a non-mainline set of revisions?
    """
    last_verifier = last_tree.get_file_verifier(last_path)
    graph = repository.get_graph()
    history = list(graph.iter_lefthand_ancestry(last_revision, []))
    revno = len(history)
    for revision_id in history:
        this_tree = repository.revision_tree(revision_id)
        this_path = find_previous_path(last_tree, this_tree, last_path)

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?
        if this_path is not None and last_path is None:
            yield revno, revision_id, "deleted " + this_path
            this_verifier = this_tree.get_file_verifier(this_path)
        elif this_path is None and last_path is not None:
            yield revno, revision_id, "added " + last_path
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
            this_verifier = this_tree.get_file_verifier(this_path)
        else:
            this_verifier = this_tree.get_file_verifier(this_path)
            if (this_verifier != last_verifier):
                yield revno, revision_id, "modified " + this_path

        last_verifier = this_verifier
        last_path = this_path
        last_tree = this_tree
        if last_path is None:
            return
        revno -= 1


def show_log(branch,
             lf,
             specific_fileid=None,
             verbose=False,
             direction='reverse',
             start_revision=None,
             end_revision=None,
             search=None,
             limit=None,
             show_diff=False,
             match=None):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.
    :param specific_fileid: If not None, list only the commits affecting the
        specified file, rather than all commits.
    :param verbose: If True show added/changed/deleted/renamed files.
    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.
    :param start_revision: If not None, only show revisions >= start_revision
    :param end_revision: If not None, only show revisions <= end_revision
    :param search: If not None, only show revisions with matching commit
        messages
    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.
    :param show_diff: If True, output a diff after each revision.
    :param match: Dictionary of search lists to use when matching revision
        properties.
    """
    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
    else:
        file_ids = None
    if verbose:
        if file_ids:
            delta_type = 'partial'
        else:
            delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        if file_ids:
            diff_type = 'partial'
        else:
            diff_type = 'full'
    else:
        diff_type = None

    if isinstance(start_revision, int):
        try:
            start_revision = revisionspec.RevisionInfo(branch, start_revision)
        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
            raise errors.InvalidRevisionNumber(start_revision)

    if isinstance(end_revision, int):
        try:
            end_revision = revisionspec.RevisionInfo(branch, end_revision)
        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
            raise errors.InvalidRevisionNumber(end_revision)

    if end_revision is not None and end_revision.revno == 0:
        raise errors.InvalidRevisionNumber(end_revision.revno)

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
                                 start_revision=start_revision, end_revision=end_revision,
                                 limit=limit, message_search=search,
                                 delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
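

# Illustrative sketch, not part of the original module: the "new-style"
# equivalent of a show_log() call, as recommended in the docstring above.
# The formatter choice and the request parameters are arbitrary examples.
def _example_new_style_log(branch, to_file):
    """Show the last 10 mainline revisions of `branch` on `to_file`."""
    lf = LongLogFormatter(to_file=to_file, levels=1)
    rqst = make_log_request_dict(direction='reverse', limit=10,
                                 delta_type=None, diff_type=None)
    Logger(branch, rqst).show(lf)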


# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'levels': None,
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }


def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False, omit_merges=False,
                          ):
"""Convenience function for making a logging request dictionary.
244
Using this function may make code slightly safer by ensuring
245
parameters have the correct names. It also provides a reference
246
point for documenting the supported parameters.
248
:param direction: 'reverse' (default) is latest to earliest;
249
'forward' is earliest to latest.
251
:param specific_fileids: If not None, only include revisions
252
affecting the specified files, rather than all revisions.
254
:param start_revision: If not None, only generate
255
revisions >= start_revision
257
:param end_revision: If not None, only generate
258
revisions <= end_revision
260
:param limit: If set, generate only 'limit' revisions, all revisions
261
are shown if None or 0.
263
:param message_search: If not None, only include revisions with
264
matching commit messages
266
:param levels: the number of levels of revisions to
267
generate; 1 for just the mainline; 0 for all levels, or None for
270
:param generate_tags: If True, include tags for matched revisions.
272
:param delta_type: Either 'full', 'partial' or None.
273
'full' means generate the complete delta - adds/deletes/modifies/etc;
274
'partial' means filter the delta using specific_fileids;
275
None means do not generate any delta.
277
:param diff_type: Either 'full', 'partial' or None.
278
'full' means generate the complete diff - adds/deletes/modifies/etc;
279
'partial' means filter the diff using specific_fileids;
280
None means do not generate any diff.
282
:param _match_using_deltas: a private parameter controlling the
283
algorithm used for matching specific_fileids. This parameter
284
may be removed in the future so breezy client code should NOT
287
:param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
288
range operator or as a graph difference.
290
:param signature: show digital signature information
292
:param match: Dictionary of list of search strings to use when filtering
293
revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
294
the empty string to match any of the preceding properties.
296
:param omit_merges: If True, commits with more than one parent are
300
# Take care of old style message_search parameter
303
if 'message' in match:
304
match['message'].append(message_search)
306
match['message'] = [message_search]
308
match = {'message': [message_search]}
310
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'match': match,
        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
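

# Illustrative example, not part of the original module: a `match` dictionary
# of the shape accepted by make_log_request_dict(match=...).  A revision is
# kept when, for every key present, at least one of the listed strings matches
# the corresponding revision property; the empty-string key matches against
# any of the supported properties.  The values below are invented.
_EXAMPLE_MATCH_DICT = {
    'author': ['jrandom@example.com'],
    'message': ['fix', 'regression'],   # 'fix' OR 'regression' in the message
}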


def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result


def format_signature_validity(rev_id, branch):
    """Get the signature validity.

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log
    """
    from breezy import gpg

    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"


class LogGenerator(object):
358
"""A generator of log revisions."""
360
def iter_log_revisions(self):
361
"""Iterate over LogRevision objects.
363
:return: An iterator yielding LogRevision objects.
365
raise NotImplementedError(self.iter_log_revisions)
368
class Logger(object):
369
"""An object that generates, formats and displays a log."""
371
def __init__(self, branch, rqst):
374
:param branch: the branch to log
375
:param rqst: A dictionary specifying the query parameters.
376
See make_log_request_dict() for supported values.
379
self.rqst = _apply_log_request_defaults(rqst)
384
:param lf: The LogFormatter object to send the output to.
386
if not isinstance(lf, LogFormatter):
387
warn("not a LogFormatter instance: %r" % lf)
389
with self.branch.lock_read():
390
if getattr(lf, 'begin_log', None):
393
if getattr(lf, 'end_log', None):
396
def _show_body(self, lf):
397
"""Show the main log output.
399
Subclasses may wish to override this.
401
# Tweak the LogRequest based on what the LogFormatter can handle.
402
# (There's no point generating stuff if the formatter can't display it.)
404
if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
405
# user didn't specify levels, use whatever the LF can handle:
406
rqst['levels'] = lf.get_levels()
408
if not getattr(lf, 'supports_tags', False):
409
rqst['generate_tags'] = False
410
if not getattr(lf, 'supports_delta', False):
411
rqst['delta_type'] = None
412
if not getattr(lf, 'supports_diff', False):
413
rqst['diff_type'] = None
414
if not getattr(lf, 'supports_signatures', False):
415
rqst['signature'] = False
417
# Find and print the interesting revisions
418
generator = self._generator_factory(self.branch, rqst)
420
for lr in generator.iter_log_revisions():
422
except errors.GhostRevisionUnusableHere:
423
raise errors.BzrCommandError(
424
gettext('Further revision history missing.'))
427
def _generator_factory(self, branch, rqst):
428
"""Make the LogGenerator object to use.
430
Subclasses may wish to override this.
432
return _DefaultLogGenerator(branch, rqst)
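

# Illustrative sketch, not part of the original module: _generator_factory()
# is the documented extension point, so a subclass can substitute its own
# LogGenerator.  The class below simply delegates to the default generator;
# its name is invented for illustration.
class _ExampleLogger(Logger):

    def _generator_factory(self, branch, rqst):
        # A plugin would return its own LogGenerator subclass here.
        return _DefaultLogGenerator(branch, rqst)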
435
class _StartNotLinearAncestor(Exception):
436
"""Raised when a start revision is not found walking left-hand history."""
439
class _DefaultLogGenerator(LogGenerator):
440
"""The default generator of log revisions."""
442
def __init__(self, branch, rqst):
445
if rqst.get('generate_tags') and branch.supports_tags():
446
self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
448
self.rev_tag_dict = {}
450
def iter_log_revisions(self):
451
"""Iterate over LogRevision objects.
453
:return: An iterator yielding LogRevision objects.
456
levels = rqst.get('levels')
457
limit = rqst.get('limit')
458
diff_type = rqst.get('diff_type')
459
show_signature = rqst.get('signature')
460
omit_merges = rqst.get('omit_merges')
462
revision_iterator = self._create_log_revision_iterator()
463
for revs in revision_iterator:
464
for (rev_id, revno, merge_depth), rev, delta in revs:
465
# 0 levels means show everything; merge_depth counts from 0
466
if (levels != 0 and merge_depth is not None and
467
merge_depth >= levels):
469
if omit_merges and len(rev.parent_ids) > 1:
472
raise errors.GhostRevisionUnusableHere(rev_id)
473
if diff_type is None:
476
diff = self._format_diff(rev, rev_id, diff_type)
478
signature = format_signature_validity(rev_id, self.branch)
482
rev, revno, merge_depth, delta,
483
self.rev_tag_dict.get(rev_id), diff, signature)
486
if log_count >= limit:
489
def _format_diff(self, rev, rev_id, diff_type):
490
repo = self.branch.repository
491
if len(rev.parent_ids) == 0:
492
ancestor_id = _mod_revision.NULL_REVISION
494
ancestor_id = rev.parent_ids[0]
495
tree_1 = repo.revision_tree(ancestor_id)
496
tree_2 = repo.revision_tree(rev_id)
497
file_ids = self.rqst.get('specific_fileids')
498
if diff_type == 'partial' and file_ids is not None:
499
specific_files = [tree_2.id2path(id) for id in file_ids]
501
specific_files = None
503
path_encoding = get_diff_header_encoding()
504
diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
505
new_label='', path_encoding=path_encoding)
508
def _create_log_revision_iterator(self):
509
"""Create a revision iterator for log.
511
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
514
self.start_rev_id, self.end_rev_id = _get_revision_limits(
515
self.branch, self.rqst.get('start_revision'),
516
self.rqst.get('end_revision'))
517
if self.rqst.get('_match_using_deltas'):
518
return self._log_revision_iterator_using_delta_matching()
520
# We're using the per-file-graph algorithm. This scales really
521
# well but only makes sense if there is a single file and it's
523
file_count = len(self.rqst.get('specific_fileids'))
525
raise errors.BzrError(
526
"illegal LogRequest: must match-using-deltas "
527
"when logging %d files" % file_count)
528
return self._log_revision_iterator_using_per_file_graph()
530
def _log_revision_iterator_using_delta_matching(self):
531
# Get the base revisions, filtering by the revision range
533
generate_merge_revisions = rqst.get('levels') != 1
534
delayed_graph_generation = not rqst.get('specific_fileids') and (
535
rqst.get('limit') or self.start_rev_id or self.end_rev_id)
536
view_revisions = _calc_view_revisions(
537
self.branch, self.start_rev_id, self.end_rev_id,
538
rqst.get('direction'),
539
generate_merge_revisions=generate_merge_revisions,
540
delayed_graph_generation=delayed_graph_generation,
541
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
543
# Apply the other filters
544
return make_log_rev_iterator(self.branch, view_revisions,
545
rqst.get('delta_type'), rqst.get('match'),
546
file_ids=rqst.get('specific_fileids'),
547
direction=rqst.get('direction'))
549
def _log_revision_iterator_using_per_file_graph(self):
550
# Get the base revisions, filtering by the revision range.
551
# Note that we always generate the merge revisions because
552
# filter_revisions_touching_file_id() requires them ...
554
view_revisions = _calc_view_revisions(
555
self.branch, self.start_rev_id, self.end_rev_id,
556
rqst.get('direction'), generate_merge_revisions=True,
557
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
558
if not isinstance(view_revisions, list):
559
view_revisions = list(view_revisions)
560
view_revisions = _filter_revisions_touching_file_id(self.branch,
561
rqst.get('specific_fileids')[
563
include_merges=rqst.get('levels') != 1)
564
return make_log_rev_iterator(self.branch, view_revisions,
565
rqst.get('delta_type'), rqst.get('match'))
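

# Illustrative example, not part of the original module: a request that routes
# through the per-file-graph iterator above.  It is only valid for a single
# file; the file id is a placeholder supplied by the caller.
def _example_per_file_graph_request(file_id):
    """Return a request dict that logs one file via its per-file graph."""
    return make_log_request_dict(specific_fileids=[file_id],
                                 _match_using_deltas=False,
                                 levels=0)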
568
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
569
generate_merge_revisions,
570
delayed_graph_generation=False,
571
exclude_common_ancestry=False,
573
"""Calculate the revisions to view.
575
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
576
a list of the same tuples.
578
if (exclude_common_ancestry and start_rev_id == end_rev_id):
579
raise errors.BzrCommandError(gettext(
580
'--exclude-common-ancestry requires two different revisions'))
581
if direction not in ('reverse', 'forward'):
582
raise ValueError(gettext('invalid direction %r') % direction)
583
br_rev_id = branch.last_revision()
584
if br_rev_id == _mod_revision.NULL_REVISION:
587
if (end_rev_id and start_rev_id == end_rev_id
588
and (not generate_merge_revisions
589
or not _has_merges(branch, end_rev_id))):
590
# If a single revision is requested, check we can handle it
591
return _generate_one_revision(branch, end_rev_id, br_rev_id,
593
if not generate_merge_revisions:
595
# If we only want to see linear revisions, we can iterate ...
596
iter_revs = _linear_view_revisions(
597
branch, start_rev_id, end_rev_id,
598
exclude_common_ancestry=exclude_common_ancestry)
599
# If a start limit was given and it's not obviously an
600
# ancestor of the end limit, check it before outputting anything
601
if (direction == 'forward'
602
or (start_rev_id and not _is_obvious_ancestor(
603
branch, start_rev_id, end_rev_id))):
604
iter_revs = list(iter_revs)
605
if direction == 'forward':
606
iter_revs = reversed(iter_revs)
608
except _StartNotLinearAncestor:
609
# Switch to the slower implementation that may be able to find a
610
# non-obvious ancestor out of the left-hand history.
612
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
613
direction, delayed_graph_generation,
614
exclude_common_ancestry)
615
if direction == 'forward':
616
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
620
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
621
if rev_id == br_rev_id:
623
return [(br_rev_id, br_revno, 0)]
625
revno_str = _compute_revno_str(branch, rev_id)
626
return [(rev_id, revno_str, 0)]
629
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
630
delayed_graph_generation,
631
exclude_common_ancestry=False):
632
# On large trees, generating the merge graph can take 30-60 seconds
633
# so we delay doing it until a merge is detected, incrementally
634
# returning initial (non-merge) revisions while we can.
636
# The above is only true for old formats (<= 0.92), for newer formats, a
637
# couple of seconds only should be needed to load the whole graph and the
638
# other graph operations needed are even faster than that -- vila 100201
639
initial_revisions = []
640
if delayed_graph_generation:
642
for rev_id, revno, depth in _linear_view_revisions(
643
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
644
if _has_merges(branch, rev_id):
645
# The end_rev_id can be nested down somewhere. We need an
646
# explicit ancestry check. There is an ambiguity here as we
647
# may not raise _StartNotLinearAncestor for a revision that
648
# is an ancestor but not a *linear* one. But since we have
649
# loaded the graph to do the check (or calculate a dotted
650
# revno), we may as well accept to show the log... We need
651
# the check only if start_rev_id is not None as all
652
# revisions have _mod_revision.NULL_REVISION as an ancestor
654
graph = branch.repository.get_graph()
655
if (start_rev_id is not None
656
and not graph.is_ancestor(start_rev_id, end_rev_id)):
657
raise _StartNotLinearAncestor()
658
# Since we collected the revisions so far, we need to
663
initial_revisions.append((rev_id, revno, depth))
665
# No merged revisions found
666
return initial_revisions
667
except _StartNotLinearAncestor:
668
# A merge was never detected so the lower revision limit can't
669
# be nested down somewhere
670
raise errors.BzrCommandError(gettext('Start revision not found in'
671
' history of end revision.'))
673
# We exit the loop above because we encounter a revision with merges, from
674
# this revision, we need to switch to _graph_view_revisions.
676
# A log including nested merges is required. If the direction is reverse,
677
# we rebase the initial merge depths so that the development line is
678
# shown naturally, i.e. just like it is for linear logging. We can easily
679
# make forward the exact opposite display, but showing the merge revisions
680
# indented at the end seems slightly nicer in that case.
681
view_revisions = itertools.chain(iter(initial_revisions),
682
_graph_view_revisions(branch, start_rev_id, end_rev_id,
683
rebase_initial_depths=(
684
direction == 'reverse'),
685
exclude_common_ancestry=exclude_common_ancestry))
686
return view_revisions
689
def _has_merges(branch, rev_id):
690
"""Does a revision have multiple parents or not?"""
691
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
692
return len(parents) > 1
695
def _compute_revno_str(branch, rev_id):
696
"""Compute the revno string from a rev_id.
698
:return: The revno string, or None if the revision is not in the supplied
702
revno = branch.revision_id_to_dotted_revno(rev_id)
703
except errors.NoSuchRevision:
704
# The revision must be outside of this branch
707
return '.'.join(str(n) for n in revno)
710
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
711
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
712
if start_rev_id and end_rev_id:
714
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
715
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
716
except errors.NoSuchRevision:
717
# one or both is not in the branch; not obvious
719
if len(start_dotted) == 1 and len(end_dotted) == 1:
721
return start_dotted[0] <= end_dotted[0]
722
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
723
start_dotted[0:1] == end_dotted[0:1]):
724
# both on same development line
725
return start_dotted[2] <= end_dotted[2]
729
# if either start or end is not specified then we use either the first or
730
# the last revision and *they* are obvious ancestors.
734
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
735
exclude_common_ancestry=False):
736
"""Calculate a sequence of revisions to view, newest to oldest.
738
:param start_rev_id: the lower revision-id
739
:param end_rev_id: the upper revision-id
740
:param exclude_common_ancestry: Whether the start_rev_id should be part of
741
the iterated revisions.
742
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
743
dotted_revno will be None for ghosts
744
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
745
is not found walking the left-hand history
747
repo = branch.repository
748
graph = repo.get_graph()
749
if start_rev_id is None and end_rev_id is None:
751
br_revno, br_rev_id = branch.last_revision_info()
752
except errors.GhostRevisionsHaveNoRevno:
753
br_rev_id = branch.last_revision()
757
graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
758
(_mod_revision.NULL_REVISION,))
761
revision_id = next(graph_iter)
762
except errors.RevisionNotPresent as e:
764
yield e.revision_id, None, None
766
except StopIteration:
769
yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
770
if cur_revno is not None:
773
br_rev_id = branch.last_revision()
774
if end_rev_id is None:
775
end_rev_id = br_rev_id
776
found_start = start_rev_id is None
777
graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
778
(_mod_revision.NULL_REVISION,))
781
revision_id = next(graph_iter)
782
except StopIteration:
784
except errors.RevisionNotPresent as e:
786
yield e.revision_id, None, None
789
revno_str = _compute_revno_str(branch, revision_id)
790
if not found_start and revision_id == start_rev_id:
791
if not exclude_common_ancestry:
792
yield revision_id, revno_str, 0
796
yield revision_id, revno_str, 0
798
raise _StartNotLinearAncestor()
801
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
802
rebase_initial_depths=True,
803
exclude_common_ancestry=False):
804
"""Calculate revisions to view including merges, newest to oldest.
806
:param branch: the branch
807
:param start_rev_id: the lower revision-id
808
:param end_rev_id: the upper revision-id
809
:param rebase_initial_depth: should depths be rebased until a mainline
811
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
813
if exclude_common_ancestry:
814
stop_rule = 'with-merges-without-common-ancestry'
816
stop_rule = 'with-merges'
817
view_revisions = branch.iter_merge_sorted_revisions(
818
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
820
if not rebase_initial_depths:
821
for (rev_id, merge_depth, revno, end_of_merge
823
yield rev_id, '.'.join(map(str, revno)), merge_depth
825
# We're following a development line starting at a merged revision.
826
# We need to adjust depths down by the initial depth until we find
827
# a depth less than it. Then we use that depth as the adjustment.
828
# If and when we reach the mainline, depth adjustment ends.
829
depth_adjustment = None
830
for (rev_id, merge_depth, revno, end_of_merge
832
if depth_adjustment is None:
833
depth_adjustment = merge_depth
835
if merge_depth < depth_adjustment:
836
# From now on we reduce the depth adjustement, this can be
837
# surprising for users. The alternative requires two passes
838
# which breaks the fast display of the first revision
840
depth_adjustment = merge_depth
841
merge_depth -= depth_adjustment
842
yield rev_id, '.'.join(map(str, revno)), merge_depth
845
def _rebase_merge_depth(view_revisions):
846
"""Adjust depths upwards so the top level is 0."""
847
# If either the first or last revision have a merge_depth of 0, we're done
848
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
849
min_depth = min([d for r, n, d in view_revisions])
851
view_revisions = [(r, n, d - min_depth)
852
for r, n, d in view_revisions]
853
return view_revisions
856
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
857
file_ids=None, direction='reverse'):
858
"""Create a revision iterator for log.
860
:param branch: The branch being logged.
861
:param view_revisions: The revisions being viewed.
862
:param generate_delta: Whether to generate a delta for each revision.
863
Permitted values are None, 'full' and 'partial'.
864
:param search: A user text search string.
865
:param file_ids: If non empty, only revisions matching one or more of
866
the file-ids are to be kept.
867
:param direction: the direction in which view_revisions is sorted
868
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
871
# Convert view_revisions into (view, None, None) groups to fit with
872
# the standard interface here.
873
if isinstance(view_revisions, list):
874
# A single batch conversion is faster than many incremental ones.
875
# As we have all the data, do a batch conversion.
876
nones = [None] * len(view_revisions)
877
log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
880
for view in view_revisions:
881
yield (view, None, None)
882
log_rev_iterator = iter([_convert()])
883
for adapter in log_adapters:
884
# It would be nicer if log adapters were first class objects
885
# with custom parameters. This will do for now. IGC 20090127
886
if adapter == _make_delta_filter:
887
log_rev_iterator = adapter(
888
branch, generate_delta, search, log_rev_iterator, file_ids,
891
log_rev_iterator = adapter(
892
branch, generate_delta, search, log_rev_iterator)
893
return log_rev_iterator
896
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
897
"""Create a filtered iterator of log_rev_iterator matching on a regex.
899
:param branch: The branch being logged.
900
:param generate_delta: Whether to generate a delta for each revision.
901
:param match: A dictionary with properties as keys and lists of strings
902
as values. To match, a revision may match any of the supplied strings
903
within a single property but must match at least one string for each
905
:param log_rev_iterator: An input iterator containing all revisions that
906
could be displayed, in lists.
907
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
911
return log_rev_iterator
912
# Use lazy_compile so mapping to InvalidPattern error occurs.
913
searchRE = [(k, [lazy_regex.lazy_compile(x, re.IGNORECASE) for x in v])
914
for k, v in match.items()]
915
return _filter_re(searchRE, log_rev_iterator)
918
def _filter_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs
925
def _match_filter(searchRE, rev):
    strings = {
        'message': (rev.message,),
        'committer': (rev.committer,),
        'author': (rev.get_apparent_authors()),
        'bugs': list(rev.iter_bugs())
        }
    strings[''] = [item for inner_list in strings.values()
                   for item in inner_list]
    for k, v in searchRE:
        if k in strings and not _match_any_filter(strings[k], v):
            return False
    return True
940
def _match_any_filter(strings, res):
941
return any(r.search(s) for r in res for s in strings)
944
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
945
fileids=None, direction='reverse'):
946
"""Add revision deltas to a log iterator if needed.
948
:param branch: The branch being logged.
949
:param generate_delta: Whether to generate a delta for each revision.
950
Permitted values are None, 'full' and 'partial'.
951
:param search: A user text search string.
952
:param log_rev_iterator: An input iterator containing all revisions that
953
could be displayed, in lists.
954
:param fileids: If non empty, only revisions matching one or more of
955
the file-ids are to be kept.
956
:param direction: the direction in which view_revisions is sorted
957
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
960
if not generate_delta and not fileids:
961
return log_rev_iterator
962
return _generate_deltas(branch.repository, log_rev_iterator,
963
generate_delta, fileids, direction)
966
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
968
"""Create deltas for each batch of revisions in log_rev_iterator.
970
If we're only generating deltas for the sake of filtering against
971
file-ids, we stop generating deltas once all file-ids reach the
972
appropriate life-cycle point. If we're receiving data newest to
973
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
975
check_fileids = fileids is not None and len(fileids) > 0
977
fileid_set = set(fileids)
978
if direction == 'reverse':
984
for revs in log_rev_iterator:
985
# If we were matching against fileids and we've run out,
986
# there's nothing left to do
987
if check_fileids and not fileid_set:
989
revisions = [rev[1] for rev in revs]
991
if delta_type == 'full' and not check_fileids:
992
deltas = repository.get_deltas_for_revisions(revisions)
993
for rev, delta in zip(revs, deltas):
994
new_revs.append((rev[0], rev[1], delta))
996
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
997
for rev, delta in zip(revs, deltas):
999
if delta is None or not delta.has_changed():
1002
_update_fileids(delta, fileid_set, stop_on)
1003
if delta_type is None:
1005
elif delta_type == 'full':
1006
# If the file matches all the time, rebuilding
1007
# a full delta like this in addition to a partial
1008
# one could be slow. However, it's likely that
1009
# most revisions won't get this far, making it
1010
# faster to filter on the partial deltas and
1011
# build the occasional full delta than always
1012
# building full deltas and filtering those.
1014
delta = repository.get_revision_delta(rev_id)
1015
new_revs.append((rev[0], rev[1], delta))
1019
def _update_fileids(delta, fileids, stop_on):
1020
"""Update the set of file-ids to search based on file lifecycle events.
1022
:param fileids: a set of fileids to update
1023
:param stop_on: either 'add' or 'remove' - take file-ids out of the
1024
fileids set once their add or remove entry is detected respectively
1026
if stop_on == 'add':
1027
for item in delta.added:
1028
if item[1] in fileids:
1029
fileids.remove(item[1])
1030
elif stop_on == 'delete':
1031
for item in delta.removed:
1032
if item[1] in fileids:
1033
fileids.remove(item[1])
1036
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
1037
"""Extract revision objects from the repository
1039
:param branch: The branch being logged.
1040
:param generate_delta: Whether to generate a delta for each revision.
1041
:param search: A user text search string.
1042
:param log_rev_iterator: An input iterator containing all revisions that
1043
could be displayed, in lists.
1044
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1047
repository = branch.repository
1048
for revs in log_rev_iterator:
1049
# r = revision_id, n = revno, d = merge depth
1050
revision_ids = [view[0] for view, _, _ in revs]
1051
revisions = dict(repository.iter_revisions(revision_ids))
1052
yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
1055
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
1056
"""Group up a single large batch into smaller ones.
1058
:param branch: The branch being logged.
1059
:param generate_delta: Whether to generate a delta for each revision.
1060
:param search: A user text search string.
1061
:param log_rev_iterator: An input iterator containing all revisions that
1062
could be displayed, in lists.
1063
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1067
for batch in log_rev_iterator:
1070
step = [detail for _, detail in zip(range(num), batch)]
1074
num = min(int(num * 1.5), 200)
1077
def _get_revision_limits(branch, start_revision, end_revision):
1078
"""Get and check revision limits.
1080
:param branch: The branch containing the revisions.
1082
:param start_revision: The first revision to be logged.
1083
but for merge revision support a RevisionInfo is expected.
1085
:param end_revision: The last revision to be logged.
1086
For backwards compatibility this may be a mainline integer revno,
1087
but for merge revision support a RevisionInfo is expected.
1089
:return: (start_rev_id, end_rev_id) tuple.
1093
if start_revision is not None:
1094
if not isinstance(start_revision, revisionspec.RevisionInfo):
1095
raise TypeError(start_revision)
1096
start_rev_id = start_revision.rev_id
1097
start_revno = start_revision.revno
1098
if start_revno is None:
1103
if end_revision is not None:
1104
if not isinstance(end_revision, revisionspec.RevisionInfo):
1105
            raise TypeError(end_revision)
1106
end_rev_id = end_revision.rev_id
1107
end_revno = end_revision.revno
1108
if end_revno is None:
1110
end_revno = branch.revno()
1111
except errors.GhostRevisionsHaveNoRevno:
1114
if branch.last_revision() != _mod_revision.NULL_REVISION:
1115
if (start_rev_id == _mod_revision.NULL_REVISION
1116
or end_rev_id == _mod_revision.NULL_REVISION):
1117
raise errors.BzrCommandError(
1118
gettext('Logging revision 0 is invalid.'))
1119
if end_revno is not None and start_revno > end_revno:
1120
raise errors.BzrCommandError(
1121
gettext("Start revision must be older than the end revision."))
1122
return (start_rev_id, end_rev_id)
1125
def _get_mainline_revs(branch, start_revision, end_revision):
1126
"""Get the mainline revisions from the branch.
1128
Generates the list of mainline revisions for the branch.
1130
:param branch: The branch containing the revisions.
1132
:param start_revision: The first revision to be logged.
1133
For backwards compatibility this may be a mainline integer revno,
1134
but for merge revision support a RevisionInfo is expected.
1136
:param end_revision: The last revision to be logged.
1137
For backwards compatibility this may be a mainline integer revno,
1138
but for merge revision support a RevisionInfo is expected.
1140
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
1142
branch_revno, branch_last_revision = branch.last_revision_info()
1143
if branch_revno == 0:
1144
return None, None, None, None
1146
# For mainline generation, map start_revision and end_revision to
1147
# mainline revnos. If the revision is not on the mainline choose the
1148
# appropriate extreme of the mainline instead - the extra will be
1150
# Also map the revisions to rev_ids, to be used in the later filtering
1153
if start_revision is None:
1156
if isinstance(start_revision, revisionspec.RevisionInfo):
1157
start_rev_id = start_revision.rev_id
1158
start_revno = start_revision.revno or 1
1160
branch.check_real_revno(start_revision)
1161
start_revno = start_revision
1164
if end_revision is None:
1165
end_revno = branch_revno
1167
if isinstance(end_revision, revisionspec.RevisionInfo):
1168
end_rev_id = end_revision.rev_id
1169
end_revno = end_revision.revno or branch_revno
1171
branch.check_real_revno(end_revision)
1172
end_revno = end_revision
1174
if ((start_rev_id == _mod_revision.NULL_REVISION)
1175
or (end_rev_id == _mod_revision.NULL_REVISION)):
1176
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1177
if start_revno > end_revno:
1178
raise errors.BzrCommandError(gettext("Start revision must be older "
1179
"than the end revision."))
1181
if end_revno < start_revno:
1182
return None, None, None, None
1183
cur_revno = branch_revno
1186
graph = branch.repository.get_graph()
1187
for revision_id in graph.iter_lefthand_ancestry(
1188
branch_last_revision, (_mod_revision.NULL_REVISION,)):
1189
if cur_revno < start_revno:
1190
# We have gone far enough, but we always add 1 more revision
1191
rev_nos[revision_id] = cur_revno
1192
mainline_revs.append(revision_id)
1194
if cur_revno <= end_revno:
1195
rev_nos[revision_id] = cur_revno
1196
mainline_revs.append(revision_id)
1199
# We walked off the edge of all revisions, so we add a 'None' marker
1200
mainline_revs.append(None)
1202
mainline_revs.reverse()
1204
# override the mainline to look like the revision history.
1205
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1208
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1209
include_merges=True):
1210
r"""Return the list of revision ids which touch a given file id.
1212
The function filters view_revisions and returns a subset.
1213
This includes the revisions which directly change the file id,
1214
and the revisions which merge these changes. So if the
1227
And 'C' changes a file, then both C and D will be returned. F will not be
1228
returned even though it brings the changes to C into the branch starting
1229
with E. (Note that if we were using F as the tip instead of G, then we
1232
This will also be restricted based on a subset of the mainline.
1234
:param branch: The branch where we can get text revision information.
1236
:param file_id: Filter out revisions that do not touch file_id.
1238
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1239
tuples. This is the list of revisions which will be filtered. It is
1240
assumed that view_revisions is in merge_sort order (i.e. newest
1243
:param include_merges: include merge revisions in the result or not
1245
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1247
# Lookup all possible text keys to determine which ones actually modified
1249
graph = branch.repository.get_file_graph()
1250
get_parent_map = graph.get_parent_map
1251
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1253
# Looking up keys in batches of 1000 can cut the time in half, as well as
1254
# memory consumption. GraphIndex *does* like to look for a few keys in
1255
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1256
# TODO: This code needs to be re-evaluated periodically as we tune the
1257
# indexing layer. We might consider passing in hints as to the known
1258
# access pattern (sparse/clustered, high success rate/low success
1259
# rate). This particular access is clustered with a low success rate.
1260
modified_text_revisions = set()
1262
for start in range(0, len(text_keys), chunk_size):
1263
next_keys = text_keys[start:start + chunk_size]
1264
# Only keep the revision_id portion of the key
1265
modified_text_revisions.update(
1266
[k[1] for k in get_parent_map(next_keys)])
1267
del text_keys, next_keys
1270
# Track what revisions will merge the current revision, replace entries
1271
# with 'None' when they have been added to result
1272
current_merge_stack = [None]
1273
for info in view_revisions:
1274
rev_id, revno, depth = info
1275
if depth == len(current_merge_stack):
1276
current_merge_stack.append(info)
1278
del current_merge_stack[depth + 1:]
1279
current_merge_stack[-1] = info
1281
if rev_id in modified_text_revisions:
1282
# This needs to be logged, along with the extra revisions
1283
for idx in range(len(current_merge_stack)):
1284
node = current_merge_stack[idx]
1285
if node is not None:
1286
if include_merges or node[2] == 0:
1288
current_merge_stack[idx] = None
1292
def reverse_by_depth(merge_sorted_revisions, _depth=0):
1293
"""Reverse revisions by depth.
1295
Revisions with a different depth are sorted as a group with the previous
1296
revision of that depth. There may be no topological justification for this
1297
but it looks much nicer.
1299
# Add a fake revision at start so that we can always attach sub revisions
1300
merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
1302
for val in merge_sorted_revisions:
1303
if val[2] == _depth:
1304
# Each revision at the current depth becomes a chunk grouping all
1305
# higher depth revisions.
1306
zd_revisions.append([val])
1308
zd_revisions[-1].append(val)
1309
for revisions in zd_revisions:
1310
if len(revisions) > 1:
1311
# We have higher depth revisions, let reverse them locally
1312
revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
1313
zd_revisions.reverse()
1315
for chunk in zd_revisions:
1316
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
1323
class LogRevision(object):
1324
"""A revision to be logged (by LogFormatter.log_revision).
1326
A simple wrapper for the attributes of a revision to be logged.
1327
The attributes may or may not be populated, as determined by the
1328
logging options and the log formatter capabilities.
1331
def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
1332
tags=None, diff=None, signature=None):
1337
self.revno = str(revno)
1338
self.merge_depth = merge_depth
1342
self.signature = signature
1345
class LogFormatter(object):
1346
"""Abstract class to display log messages.
1348
At a minimum, a derived class must implement the log_revision method.
1350
If the LogFormatter needs to be informed of the beginning or end of
1351
a log it should implement the begin_log and/or end_log hook methods.
1353
A LogFormatter should define the following supports_XXX flags
1354
to indicate which LogRevision attributes it supports:
1356
- supports_delta must be True if this log formatter supports delta.
1357
Otherwise the delta attribute may not be populated. The 'delta_format'
1358
attribute describes whether the 'short_status' format (1) or the long
1359
one (2) should be used.
1361
- supports_merge_revisions must be True if this log formatter supports
1362
merge revisions. If not, then only mainline revisions will be passed
1365
- preferred_levels is the number of levels this formatter defaults to.
1366
The default value is zero meaning display all levels.
1367
This value is only relevant if supports_merge_revisions is True.
1369
- supports_tags must be True if this log formatter supports tags.
1370
Otherwise the tags attribute may not be populated.
1372
- supports_diff must be True if this log formatter supports diffs.
1373
Otherwise the diff attribute may not be populated.
1375
- supports_signatures must be True if this log formatter supports GPG
1378
Plugins can register functions to show custom revision properties using
1379
the properties_handler_registry. The registered function
1380
must respect the following interface description::
1382
        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """

preferred_levels = 0
1388
def __init__(self, to_file, show_ids=False, show_timezone='original',
1389
delta_format=None, levels=None, show_advice=False,
1390
to_exact_file=None, author_list_handler=None):
1391
"""Create a LogFormatter.
1393
:param to_file: the file to output to
1394
:param to_exact_file: if set, gives an output stream to which
1395
non-Unicode diffs are written.
1396
:param show_ids: if True, revision-ids are to be displayed
1397
:param show_timezone: the timezone to use
1398
:param delta_format: the level of delta information to display
1399
or None to leave it to the formatter to decide
1400
:param levels: the number of levels to display; None or -1 to
1401
let the log formatter decide.
1402
:param show_advice: whether to show advice at the end of the
1404
:param author_list_handler: callable generating a list of
1405
authors to display for a given revision
1407
self.to_file = to_file
1408
# 'exact' stream used to show diff, it should print content 'as is'
1409
# and should not try to decode/encode it to unicode to avoid bug
1411
if to_exact_file is not None:
1412
self.to_exact_file = to_exact_file
1414
# XXX: somewhat hacky; this assumes it's a codec writer; it's
1415
# better for code that expects to get diffs to pass in the exact
1417
self.to_exact_file = getattr(to_file, 'stream', to_file)
1418
self.show_ids = show_ids
1419
self.show_timezone = show_timezone
1420
if delta_format is None:
1421
# Ensures backward compatibility
1422
delta_format = 2 # long format
1423
self.delta_format = delta_format
1424
self.levels = levels
1425
self._show_advice = show_advice
1426
self._merge_count = 0
1427
self._author_list_handler = author_list_handler
1429
def get_levels(self):
1430
"""Get the number of levels to display or 0 for all."""
1431
if getattr(self, 'supports_merge_revisions', False):
1432
if self.levels is None or self.levels == -1:
1433
self.levels = self.preferred_levels
1438
def log_revision(self, revision):
1441
:param revision: The LogRevision to be logged.
1443
raise NotImplementedError('not implemented in abstract base')
1445
def show_advice(self):
1446
"""Output user advice, if any, when the log is completed."""
1447
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1448
advice_sep = self.get_advice_separator()
1450
self.to_file.write(advice_sep)
1452
"Use --include-merged or -n0 to see merged revisions.\n")
1454
def get_advice_separator(self):
1455
"""Get the text separating the log from the closing advice."""
1458
def short_committer(self, rev):
1459
name, address = config.parse_username(rev.committer)
1464
def short_author(self, rev):
1465
return self.authors(rev, 'first', short=True, sep=', ')
1467
def authors(self, rev, who, short=False, sep=None):
1468
"""Generate list of authors, taking --authors option into account.
1470
The caller has to specify the name of a author list handler,
1471
as provided by the author list registry, using the ``who``
1472
argument. That name only sets a default, though: when the
1473
user selected a different author list generation using the
1474
``--authors`` command line switch, as represented by the
1475
``author_list_handler`` constructor argument, that value takes
1478
:param rev: The revision for which to generate the list of authors.
1479
:param who: Name of the default handler.
1480
:param short: Whether to shorten names to either name or address.
1481
:param sep: What separator to use for automatic concatenation.
1483
if self._author_list_handler is not None:
1484
# The user did specify --authors, which overrides the default
1485
author_list_handler = self._author_list_handler
1487
# The user didn't specify --authors, so we use the caller's default
1488
author_list_handler = author_list_registry.get(who)
1489
names = author_list_handler(rev)
1491
for i in range(len(names)):
1492
name, address = config.parse_username(names[i])
1498
names = sep.join(names)
1501
def merge_marker(self, revision):
1502
"""Get the merge marker to include in the output or '' if none."""
1503
if len(revision.rev.parent_ids) > 1:
1504
self._merge_count += 1
1509
def show_properties(self, revision, indent):
1510
"""Displays the custom properties returned by each registered handler.
1512
If a registered handler raises an error it is propagated.
1514
for line in self.custom_properties(revision):
1515
self.to_file.write("%s%s\n" % (indent, line))
1517
def custom_properties(self, revision):
1518
"""Format the custom properties returned by each registered handler.
1520
If a registered handler raises an error it is propagated.
1522
:return: a list of formatted lines (excluding trailing newlines)
1524
lines = self._foreign_info_properties(revision)
1525
for key, handler in properties_handler_registry.iteritems():
1527
lines.extend(self._format_properties(handler(revision)))
1529
trace.log_exception_quietly()
1530
trace.print_exception(sys.exc_info(), self.to_file)
1533
def _foreign_info_properties(self, rev):
1534
"""Custom log displayer for foreign revision identifiers.
1536
:param rev: Revision object.
1538
# Revision comes directly from a foreign repository
1539
if isinstance(rev, foreign.ForeignRevision):
1540
return self._format_properties(
1541
rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))
1543
# Imported foreign revision revision ids always contain :
1544
if b":" not in rev.revision_id:
1547
# Revision was once imported from a foreign repository
1549
foreign_revid, mapping = \
1550
foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
1551
except errors.InvalidRevisionId:
1554
return self._format_properties(
1555
mapping.vcs.show_foreign_revid(foreign_revid))
1557
def _format_properties(self, properties):
1559
for key, value in properties.items():
1560
lines.append(key + ': ' + value)
1563
def show_diff(self, to_file, diff, indent):
1564
encoding = get_terminal_encoding()
1565
for l in diff.rstrip().split(b'\n'):
1566
to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
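

# Illustrative sketch, not part of the original module: a minimal concrete
# LogFormatter, as described in the class docstring above.  It declares no
# optional capabilities, so only mainline revisions without deltas, diffs or
# tags are passed to log_revision().  A plugin could expose it with, e.g.,
# log_formatter_registry.register('example', _ExampleOneLineLogFormatter,
# 'Example format.') once the registry below has been defined.
class _ExampleOneLineLogFormatter(LogFormatter):

    def log_revision(self, revision):
        self.to_file.write('%s %s\n' % (
            revision.revno or '?', revision.rev.get_summary()))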
1569
# Separator between revisions in long format
1570
_LONG_SEP = '-' * 60
1573
class LongLogFormatter(LogFormatter):
1575
supports_merge_revisions = True
1576
preferred_levels = 1
1577
supports_delta = True
1578
supports_tags = True
1579
supports_diff = True
1580
supports_signatures = True
1582
def __init__(self, *args, **kwargs):
1583
super(LongLogFormatter, self).__init__(*args, **kwargs)
1584
if self.show_timezone == 'original':
1585
self.date_string = self._date_string_original_timezone
1587
self.date_string = self._date_string_with_timezone
1589
def _date_string_with_timezone(self, rev):
1590
return format_date(rev.timestamp, rev.timezone or 0,
1593
def _date_string_original_timezone(self, rev):
1594
return format_date_with_offset_in_original_timezone(rev.timestamp,
1597
def log_revision(self, revision):
1598
"""Log a revision, either merged or not."""
1599
indent = ' ' * revision.merge_depth
1601
if revision.revno is not None:
1602
lines.append('revno: %s%s' % (revision.revno,
1603
self.merge_marker(revision)))
1605
lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
1606
if self.show_ids or revision.revno is None:
1607
lines.append('revision-id: %s' %
1608
(revision.rev.revision_id.decode('utf-8'),))
1610
for parent_id in revision.rev.parent_ids:
1611
lines.append('parent: %s' % (parent_id.decode('utf-8'),))
1612
lines.extend(self.custom_properties(revision.rev))
1614
committer = revision.rev.committer
1615
authors = self.authors(revision.rev, 'all')
1616
if authors != [committer]:
1617
lines.append('author: %s' % (", ".join(authors),))
1618
lines.append('committer: %s' % (committer,))
1620
branch_nick = revision.rev.properties.get('branch-nick', None)
1621
if branch_nick is not None:
1622
lines.append('branch nick: %s' % (branch_nick,))
1624
lines.append('timestamp: %s' % (self.date_string(revision.rev),))
1626
if revision.signature is not None:
1627
lines.append('signature: ' + revision.signature)
1629
lines.append('message:')
1630
if not revision.rev.message:
1631
lines.append(' (no message)')
1633
message = revision.rev.message.rstrip('\r\n')
1634
for l in message.split('\n'):
1635
lines.append(' %s' % (l,))
1637
# Dump the output, appending the delta and diff if requested
1638
to_file = self.to_file
1639
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1640
if revision.delta is not None:
1641
# Use the standard status output to display changes
1642
from breezy.delta import report_delta
1643
report_delta(to_file, revision.delta, short_status=False,
1644
show_ids=self.show_ids, indent=indent)
1645
if revision.diff is not None:
1646
to_file.write(indent + 'diff:\n')
1648
# Note: we explicitly don't indent the diff (relative to the
1649
# revision information) so that the output can be fed to patch -p0
1650
self.show_diff(self.to_exact_file, revision.diff, indent)
1651
self.to_exact_file.flush()
1653
def get_advice_separator(self):
1654
"""Get the text separating the log from the closing advice."""
1655
return '-' * 60 + '\n'
1658
class ShortLogFormatter(LogFormatter):
1660
supports_merge_revisions = True
1661
preferred_levels = 1
1662
supports_delta = True
1663
supports_tags = True
1664
supports_diff = True
1666
def __init__(self, *args, **kwargs):
1667
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1668
self.revno_width_by_depth = {}
1670
def log_revision(self, revision):
1671
# We need two indents: one per depth and one for the information
1672
# relative to that indent. Most mainline revnos are 5 chars or
1673
# less while dotted revnos are typically 11 chars or less. Once
1674
# calculated, we need to remember the offset for a given depth
1675
# as we might be starting from a dotted revno in the first column
1676
# and we want subsequent mainline revisions to line up.
1677
depth = revision.merge_depth
1678
indent = ' ' * depth
1679
revno_width = self.revno_width_by_depth.get(depth)
1680
if revno_width is None:
1681
            if revision.revno is None or revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
1687
self.revno_width_by_depth[depth] = revno_width
1688
offset = ' ' * (revno_width + 1)
1690
to_file = self.to_file
1693
tags = ' {%s}' % (', '.join(sorted(revision.tags)))
1694
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1695
revision.revno or "", self.short_author(
1697
format_date(revision.rev.timestamp,
1698
revision.rev.timezone or 0,
1699
self.show_timezone, date_fmt="%Y-%m-%d",
1701
tags, self.merge_marker(revision)))
1702
self.show_properties(revision.rev, indent + offset)
1703
if self.show_ids or revision.revno is None:
1704
to_file.write(indent + offset + 'revision-id:%s\n'
1705
% (revision.rev.revision_id.decode('utf-8'),))
1706
if not revision.rev.message:
1707
to_file.write(indent + offset + '(no message)\n')
1709
message = revision.rev.message.rstrip('\r\n')
1710
for l in message.split('\n'):
1711
to_file.write(indent + offset + '%s\n' % (l,))
1713
if revision.delta is not None:
1714
# Use the standard status output to display changes
1715
from breezy.delta import report_delta
1716
report_delta(to_file, revision.delta,
1717
short_status=self.delta_format == 1,
1718
show_ids=self.show_ids, indent=indent + offset)
1719
if revision.diff is not None:
1720
self.show_diff(self.to_exact_file, revision.diff, ' ')


class LineLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len - 3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
                                           self._max_chars, revision.tags,
                                           indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate the tail of the string.

        :param revno: revision number or None.
            Revision numbers count from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when it is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            out.append(self.truncate(
                self.short_author(rev), (max_chars + 3) // 4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(sorted(tags)))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
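
    # Example (hedged sketch, illustrative values only): the truncation above
    # keeps at most ``max_chars`` characters and replaces the tail with an
    # ellipsis, e.g.
    #
    #     lf = LineLogFormatter(None)
    #     lf.truncate('1.2.3: John Doe 2011-03-01 Fix the frobnicator', 20)
    #     # -> '1.2.3: John Doe 2...'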


class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path = c[0]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
            to_file.write('\n')
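
    # Example (hedged sketch, not verbatim output): the writes above produce
    # a GNU-ChangeLog-shaped entry for a revision that modified foo.c -- a
    # "date  author" header, tab-indented "* file:" lines from the delta,
    # then the tab-indented commit message:
    #
    #     2011-04-01  Jane Doe  <jane@example.com>
    #
    #             * foo.c:
    #             Fix the frobnicator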


def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters."""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct.  'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))


log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')
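
# Example (hedged sketch): a plugin can add its own formatter through
# register_formatter() below; ``SummaryLogFormatter`` is a hypothetical
# LogFormatter subclass, not part of this module.
#
#     class SummaryLogFormatter(LogFormatter):
#
#         def log_revision(self, revision):
#             self.to_file.write('%s\n' % (revision.rev.get_summary(),))
#
#     register_formatter('summary', SummaryLogFormatter)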


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)


def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(
            gettext("unknown log formatter: %r") % name)
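
# Example (hedged sketch): constructing a formatter by name for a
# hypothetical branch object ``branch``; unknown names raise BzrCommandError
# as above.
#
#     lf = log_formatter('short', to_file=sys.stdout)
#     show_log(branch, lf, None, verbose=False, direction='forward')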


def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')

author_list_registry.register('first', author_list_first,
                              'The first author')

author_list_registry.register('committer', author_list_committer,
                              'The committer')
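
# Example (hedged sketch): selecting an author-list policy by name and
# applying it to a revision object ``rev``.
#
#     author_fn = author_list_registry.get('first')
#     names = author_fn(rev)  # e.g. ['Jane Doe <jane@example.com>']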


def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history, comparing the old revision
    history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
                                                            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in range(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
                or len(old_rh) <= i
                or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    # TODO: It might be nice to do something like show_log
    # and show the merged entries. But since this is the
    # removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*' * 60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i + 1, 0, None)
            lf.log_revision(lr)
        to_file.write('*' * 60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 None,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx + 1,
                 end_revision=len(new_rh))
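
# Example (hedged sketch): ``old_ids`` and ``new_ids`` are hypothetical lists
# of mainline revision ids, oldest first, captured before and after an
# operation that rewrote the branch history.
#
#     show_changed_revisions(branch, old_ids, new_ids, log_format='short')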


def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    :return: old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    while do_new or do_old:
        if do_new:
            try:
                new_revision = next(new_iter)
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = next(old_iter)
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
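
# Example (hedged sketch): for lefthand histories A -> B -> C (old tip C) and
# A -> B -> D -> E (new tip E), where A..E are hypothetical revision ids, the
# uncommon parts are ([C], [D, E]):
#
#     removed, added = get_history_change(C, E, branch.repository)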


def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*' * 60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*' * 60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno)
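
# Example (hedged sketch): record the tip before moving a branch, then report
# what the move added or removed.
#
#     old_revno, old_revid = branch.last_revision_info()
#     # ... pull, push or otherwise update the branch ...
#     show_branch_change(branch, sys.stdout, old_revno, old_revid)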


def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
                                                       "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info
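
# Example (hedged sketch): how a command might call the helper above;
# ``revision_option`` and ``file_list`` come from command-line parsing and
# ``self.add_cleanup`` from the command infrastructure, none of which are
# defined here.
#
#     b, info_list, start_rev, end_rev = _get_info_for_log_files(
#         revision_option, file_list, self.add_cleanup)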


def _get_kind_for_file_id(tree, path, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(path)
    else:
        return None


properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available


def _bugs_properties_handler(revision):
    fixed_bug_urls = []
    related_bug_urls = []
    for bug_url, status in revision.iter_bugs():
        if status == 'fixed':
            fixed_bug_urls.append(bug_url)
        elif status == 'related':
            related_bug_urls.append(bug_url)
    ret = {}
    if fixed_bug_urls:
        text = ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))
        ret[text] = ' '.join(fixed_bug_urls)
    if related_bug_urls:
        text = ngettext('related bug', 'related bugs',
                        len(related_bug_urls))
        ret[text] = ' '.join(related_bug_urls)
    return ret


properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
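
# Example (hedged sketch): plugins may register additional property handlers;
# each takes a Revision and returns a dict of extra "name: value" lines for
# formatters that call show_properties(). ``_branch_nick_handler`` is a
# hypothetical handler, not part of this module.
#
#     def _branch_nick_handler(revision):
#         nick = revision.properties.get('branch-nick')
#         return {'branch nick': nick} if nick else {}
#
#     properties_handler_registry.register('branch_nick_handler',
#                                          _branch_nick_handler)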


# adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is, this won't be considered a stable api.
log_adapters = [
    # core log logic
    _make_batch_filter,
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show