1
# Copyright (C) 2005-2011 Canonical Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
"""Code to show logs of changes.
19
Various flavors of log can be produced:
21
* for one file, or the whole tree, and (not done yet) for
22
files in a given directory
24
* in "verbose" mode with a description of what changed from one
27
* with file-ids and revision-ids shown
29
Logs are actually written out through an abstract LogFormatter
30
interface, which allows for different preferred formats. Plugins can
33
Logs can be produced in either forward (oldest->newest) or reverse
34
(newest->oldest) order.
36
Logs can be filtered to show only revisions matching a particular
37
search string, or within a particular range of revisions. The range
38
can be given as date/times, which are reduced to revisions before
41
In verbose mode we show a summary of what changed in each particular
42
revision. Note that this is the delta for changes in that revision
43
relative to its left-most parent, not the delta relative to the last
44
logged revision. So for example if you ask for a verbose log of
45
changes touching hello.c you will get a list of those revisions also
46
listing other things that were changed in the same revision, but not
47
all the changes since the previous revision that touched hello.c.
50
from __future__ import absolute_import
56
from warnings import (
60
from .lazy_import import lazy_import
61
lazy_import(globals(), """
68
revision as _mod_revision,
70
from breezy.i18n import gettext, ngettext
78
from .osutils import (
80
format_date_with_offset_in_original_timezone,
81
get_diff_header_encoding,
82
get_terminal_encoding,
90
from .tree import find_previous_path
93
def find_touching_revisions(repository, last_revision, last_tree, last_path):
94
"""Yield a description of revisions which affect the file_id.
96
Each returned element is (revno, revision_id, description)
98
This is the list of revisions where the file is either added,
99
modified, renamed or deleted.
101
TODO: Perhaps some way to limit this to only particular revisions,
102
or to traverse a non-mainline set of revisions?
104
last_verifier = last_tree.get_file_verifier(last_path)
105
graph = repository.get_graph()
106
history = list(graph.iter_lefthand_ancestry(last_revision, []))
108
for revision_id in history:
109
this_tree = repository.revision_tree(revision_id)
110
this_path = find_previous_path(last_tree, this_tree, last_path)
112
# now we know how it was last time, and how it is in this revision.
113
# are those two states effectively the same or not?
114
if this_path is not None and last_path is None:
115
yield revno, revision_id, "deleted " + this_path
116
this_verifier = this_tree.get_file_verifier(this_path)
117
elif this_path is None and last_path is not None:
118
yield revno, revision_id, "added " + last_path
119
elif this_path != last_path:
120
yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
121
this_verifier = this_tree.get_file_verifier(this_path)
123
this_verifier = this_tree.get_file_verifier(this_path)
124
if (this_verifier != last_verifier):
125
yield revno, revision_id, "modified " + this_path
127
last_verifier = this_verifier
128
last_path = this_path
129
last_tree = this_tree
130
if last_path is None:
137
specific_fileid=None,
146
"""Write out human-readable log of commits to this branch.
148
This function is being retained for backwards compatibility but
149
should not be extended with new parameters. Use the new Logger class
150
instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
151
make_log_request_dict function.
153
:param lf: The LogFormatter object showing the output.
155
:param specific_fileid: If not None, list only the commits affecting the
156
specified file, rather than all commits.
158
:param verbose: If True show added/changed/deleted/renamed files.
160
:param direction: 'reverse' (default) is latest to earliest; 'forward' is
163
:param start_revision: If not None, only show revisions >= start_revision
165
:param end_revision: If not None, only show revisions <= end_revision
167
:param search: If not None, only show revisions with matching commit
170
:param limit: If set, shows only 'limit' revisions, all revisions are shown
173
:param show_diff: If True, output a diff after each revision.
175
:param match: Dictionary of search lists to use when matching revision
178
# Convert old-style parameters to new-style parameters
179
if specific_fileid is not None:
180
file_ids = [specific_fileid]
185
delta_type = 'partial'
192
diff_type = 'partial'
198
if isinstance(start_revision, int):
200
start_revision = revisionspec.RevisionInfo(branch, start_revision)
201
except errors.NoSuchRevision:
202
raise errors.InvalidRevisionNumber(start_revision)
204
if isinstance(end_revision, int):
206
end_revision = revisionspec.RevisionInfo(branch, end_revision)
207
except errors.NoSuchRevision:
208
raise errors.InvalidRevisionNumber(end_revision)
210
if end_revision is not None and end_revision.revno == 0:
211
raise errors.InvalidRevisionNumber(end_revision.revno)
213
# Build the request and execute it
214
rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
215
start_revision=start_revision, end_revision=end_revision,
216
limit=limit, message_search=search,
217
delta_type=delta_type, diff_type=diff_type)
218
Logger(branch, rqst).show(lf)
221
# Note: This needs to be kept in sync with the defaults in
222
# make_log_request_dict() below
223
_DEFAULT_REQUEST_PARAMS = {
224
'direction': 'reverse',
226
'generate_tags': True,
227
'exclude_common_ancestry': False,
228
'_match_using_deltas': True,
232
def make_log_request_dict(direction='reverse', specific_fileids=None,
233
start_revision=None, end_revision=None, limit=None,
234
message_search=None, levels=None, generate_tags=True,
236
diff_type=None, _match_using_deltas=True,
237
exclude_common_ancestry=False, match=None,
238
signature=False, omit_merges=False,
240
"""Convenience function for making a logging request dictionary.
242
Using this function may make code slightly safer by ensuring
243
parameters have the correct names. It also provides a reference
244
point for documenting the supported parameters.
246
:param direction: 'reverse' (default) is latest to earliest;
247
'forward' is earliest to latest.
249
:param specific_fileids: If not None, only include revisions
250
affecting the specified files, rather than all revisions.
252
:param start_revision: If not None, only generate
253
revisions >= start_revision
255
:param end_revision: If not None, only generate
256
revisions <= end_revision
258
:param limit: If set, generate only 'limit' revisions, all revisions
259
are shown if None or 0.
261
:param message_search: If not None, only include revisions with
262
matching commit messages
264
:param levels: the number of levels of revisions to
265
generate; 1 for just the mainline; 0 for all levels, or None for
268
:param generate_tags: If True, include tags for matched revisions.
270
:param delta_type: Either 'full', 'partial' or None.
271
'full' means generate the complete delta - adds/deletes/modifies/etc;
272
'partial' means filter the delta using specific_fileids;
273
None means do not generate any delta.
275
:param diff_type: Either 'full', 'partial' or None.
276
'full' means generate the complete diff - adds/deletes/modifies/etc;
277
'partial' means filter the diff using specific_fileids;
278
None means do not generate any diff.
280
:param _match_using_deltas: a private parameter controlling the
281
algorithm used for matching specific_fileids. This parameter
282
may be removed in the future so breezy client code should NOT
285
:param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
286
range operator or as a graph difference.
288
:param signature: show digital signature information
290
:param match: Dictionary of list of search strings to use when filtering
291
revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
292
the empty string to match any of the preceding properties.
294
:param omit_merges: If True, commits with more than one parent are
298
# Take care of old style message_search parameter
301
if 'message' in match:
302
match['message'].append(message_search)
304
match['message'] = [message_search]
306
match = {'message': [message_search]}
308
'direction': direction,
309
'specific_fileids': specific_fileids,
310
'start_revision': start_revision,
311
'end_revision': end_revision,
314
'generate_tags': generate_tags,
315
'delta_type': delta_type,
316
'diff_type': diff_type,
317
'exclude_common_ancestry': exclude_common_ancestry,
318
'signature': signature,
320
'omit_merges': omit_merges,
321
# Add 'private' attributes for features that may be deprecated
322
'_match_using_deltas': _match_using_deltas,
326
def _apply_log_request_defaults(rqst):
327
"""Apply default values to a request dictionary."""
328
result = _DEFAULT_REQUEST_PARAMS.copy()
334
def format_signature_validity(rev_id, branch):
    """get the signature validity

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log
    """
    from breezy import gpg

    strategy = gpg.GPGStrategy(branch.get_config_stack())
    result = branch.repository.verify_revision_signature(rev_id, strategy)
    status = result[0]
    # Each branch returns, so if/elif chains are equivalent here; an
    # unrecognised status falls through and yields None.
    if status == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    elif status == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    elif status == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    elif status == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
355
class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        # Abstract: concrete generators must override this method.
        raise NotImplementedError(self.iter_log_revisions)
366
class Logger(object):
367
"""An object that generates, formats and displays a log."""
369
def __init__(self, branch, rqst):
372
:param branch: the branch to log
373
:param rqst: A dictionary specifying the query parameters.
374
See make_log_request_dict() for supported values.
377
self.rqst = _apply_log_request_defaults(rqst)
382
:param lf: The LogFormatter object to send the output to.
384
if not isinstance(lf, LogFormatter):
385
warn("not a LogFormatter instance: %r" % lf)
387
with self.branch.lock_read():
388
if getattr(lf, 'begin_log', None):
391
if getattr(lf, 'end_log', None):
394
def _show_body(self, lf):
395
"""Show the main log output.
397
Subclasses may wish to override this.
399
# Tweak the LogRequest based on what the LogFormatter can handle.
400
# (There's no point generating stuff if the formatter can't display it.)
402
if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
403
# user didn't specify levels, use whatever the LF can handle:
404
rqst['levels'] = lf.get_levels()
406
if not getattr(lf, 'supports_tags', False):
407
rqst['generate_tags'] = False
408
if not getattr(lf, 'supports_delta', False):
409
rqst['delta_type'] = None
410
if not getattr(lf, 'supports_diff', False):
411
rqst['diff_type'] = None
412
if not getattr(lf, 'supports_signatures', False):
413
rqst['signature'] = False
415
# Find and print the interesting revisions
416
generator = self._generator_factory(self.branch, rqst)
418
for lr in generator.iter_log_revisions():
420
except errors.GhostRevisionUnusableHere:
421
raise errors.BzrCommandError(
422
gettext('Further revision history missing.'))
425
def _generator_factory(self, branch, rqst):
426
"""Make the LogGenerator object to use.
428
Subclasses may wish to override this.
430
return _DefaultLogGenerator(branch, rqst)
433
class _StartNotLinearAncestor(Exception):
434
"""Raised when a start revision is not found walking left-hand history."""
437
class _DefaultLogGenerator(LogGenerator):
438
"""The default generator of log revisions."""
440
def __init__(self, branch, rqst):
443
if rqst.get('generate_tags') and branch.supports_tags():
444
self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
446
self.rev_tag_dict = {}
448
def iter_log_revisions(self):
449
"""Iterate over LogRevision objects.
451
:return: An iterator yielding LogRevision objects.
454
levels = rqst.get('levels')
455
limit = rqst.get('limit')
456
diff_type = rqst.get('diff_type')
457
show_signature = rqst.get('signature')
458
omit_merges = rqst.get('omit_merges')
460
revision_iterator = self._create_log_revision_iterator()
461
for revs in revision_iterator:
462
for (rev_id, revno, merge_depth), rev, delta in revs:
463
# 0 levels means show everything; merge_depth counts from 0
464
if (levels != 0 and merge_depth is not None and
465
merge_depth >= levels):
467
if omit_merges and len(rev.parent_ids) > 1:
470
raise errors.GhostRevisionUnusableHere(rev_id)
471
if diff_type is None:
474
diff = self._format_diff(rev, rev_id, diff_type)
476
signature = format_signature_validity(rev_id, self.branch)
480
rev, revno, merge_depth, delta,
481
self.rev_tag_dict.get(rev_id), diff, signature)
484
if log_count >= limit:
487
def _format_diff(self, rev, rev_id, diff_type):
488
repo = self.branch.repository
489
if len(rev.parent_ids) == 0:
490
ancestor_id = _mod_revision.NULL_REVISION
492
ancestor_id = rev.parent_ids[0]
493
tree_1 = repo.revision_tree(ancestor_id)
494
tree_2 = repo.revision_tree(rev_id)
495
file_ids = self.rqst.get('specific_fileids')
496
if diff_type == 'partial' and file_ids is not None:
497
specific_files = [tree_2.id2path(id) for id in file_ids]
499
specific_files = None
501
path_encoding = get_diff_header_encoding()
502
diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
503
new_label='', path_encoding=path_encoding)
506
def _create_log_revision_iterator(self):
507
"""Create a revision iterator for log.
509
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
512
self.start_rev_id, self.end_rev_id = _get_revision_limits(
513
self.branch, self.rqst.get('start_revision'),
514
self.rqst.get('end_revision'))
515
if self.rqst.get('_match_using_deltas'):
516
return self._log_revision_iterator_using_delta_matching()
518
# We're using the per-file-graph algorithm. This scales really
519
# well but only makes sense if there is a single file and it's
521
file_count = len(self.rqst.get('specific_fileids'))
523
raise errors.BzrError(
524
"illegal LogRequest: must match-using-deltas "
525
"when logging %d files" % file_count)
526
return self._log_revision_iterator_using_per_file_graph()
528
def _log_revision_iterator_using_delta_matching(self):
529
# Get the base revisions, filtering by the revision range
531
generate_merge_revisions = rqst.get('levels') != 1
532
delayed_graph_generation = not rqst.get('specific_fileids') and (
533
rqst.get('limit') or self.start_rev_id or self.end_rev_id)
534
view_revisions = _calc_view_revisions(
535
self.branch, self.start_rev_id, self.end_rev_id,
536
rqst.get('direction'),
537
generate_merge_revisions=generate_merge_revisions,
538
delayed_graph_generation=delayed_graph_generation,
539
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
541
# Apply the other filters
542
return make_log_rev_iterator(self.branch, view_revisions,
543
rqst.get('delta_type'), rqst.get('match'),
544
file_ids=rqst.get('specific_fileids'),
545
direction=rqst.get('direction'))
547
def _log_revision_iterator_using_per_file_graph(self):
548
# Get the base revisions, filtering by the revision range.
549
# Note that we always generate the merge revisions because
550
# filter_revisions_touching_file_id() requires them ...
552
view_revisions = _calc_view_revisions(
553
self.branch, self.start_rev_id, self.end_rev_id,
554
rqst.get('direction'), generate_merge_revisions=True,
555
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
556
if not isinstance(view_revisions, list):
557
view_revisions = list(view_revisions)
558
view_revisions = _filter_revisions_touching_file_id(self.branch,
559
rqst.get('specific_fileids')[
561
include_merges=rqst.get('levels') != 1)
562
return make_log_rev_iterator(self.branch, view_revisions,
563
rqst.get('delta_type'), rqst.get('match'))
566
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
567
generate_merge_revisions,
568
delayed_graph_generation=False,
569
exclude_common_ancestry=False,
571
"""Calculate the revisions to view.
573
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
574
a list of the same tuples.
576
if (exclude_common_ancestry and start_rev_id == end_rev_id):
577
raise errors.BzrCommandError(gettext(
578
'--exclude-common-ancestry requires two different revisions'))
579
if direction not in ('reverse', 'forward'):
580
raise ValueError(gettext('invalid direction %r') % direction)
581
br_rev_id = branch.last_revision()
582
if br_rev_id == _mod_revision.NULL_REVISION:
585
if (end_rev_id and start_rev_id == end_rev_id
586
and (not generate_merge_revisions
587
or not _has_merges(branch, end_rev_id))):
588
# If a single revision is requested, check we can handle it
589
return _generate_one_revision(branch, end_rev_id, br_rev_id,
591
if not generate_merge_revisions:
593
# If we only want to see linear revisions, we can iterate ...
594
iter_revs = _linear_view_revisions(
595
branch, start_rev_id, end_rev_id,
596
exclude_common_ancestry=exclude_common_ancestry)
597
# If a start limit was given and it's not obviously an
598
# ancestor of the end limit, check it before outputting anything
599
if (direction == 'forward'
600
or (start_rev_id and not _is_obvious_ancestor(
601
branch, start_rev_id, end_rev_id))):
602
iter_revs = list(iter_revs)
603
if direction == 'forward':
604
iter_revs = reversed(iter_revs)
606
except _StartNotLinearAncestor:
607
# Switch to the slower implementation that may be able to find a
608
# non-obvious ancestor out of the left-hand history.
610
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
611
direction, delayed_graph_generation,
612
exclude_common_ancestry)
613
if direction == 'forward':
614
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
618
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
619
if rev_id == br_rev_id:
621
return [(br_rev_id, br_revno, 0)]
623
revno_str = _compute_revno_str(branch, rev_id)
624
return [(rev_id, revno_str, 0)]
627
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
628
delayed_graph_generation,
629
exclude_common_ancestry=False):
630
# On large trees, generating the merge graph can take 30-60 seconds
631
# so we delay doing it until a merge is detected, incrementally
632
# returning initial (non-merge) revisions while we can.
634
# The above is only true for old formats (<= 0.92), for newer formats, a
635
# couple of seconds only should be needed to load the whole graph and the
636
# other graph operations needed are even faster than that -- vila 100201
637
initial_revisions = []
638
if delayed_graph_generation:
640
for rev_id, revno, depth in _linear_view_revisions(
641
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
642
if _has_merges(branch, rev_id):
643
# The end_rev_id can be nested down somewhere. We need an
644
# explicit ancestry check. There is an ambiguity here as we
645
# may not raise _StartNotLinearAncestor for a revision that
646
# is an ancestor but not a *linear* one. But since we have
647
# loaded the graph to do the check (or calculate a dotted
648
# revno), we may as well accept to show the log... We need
649
# the check only if start_rev_id is not None as all
650
# revisions have _mod_revision.NULL_REVISION as an ancestor
652
graph = branch.repository.get_graph()
653
if (start_rev_id is not None
654
and not graph.is_ancestor(start_rev_id, end_rev_id)):
655
raise _StartNotLinearAncestor()
656
# Since we collected the revisions so far, we need to
661
initial_revisions.append((rev_id, revno, depth))
663
# No merged revisions found
664
return initial_revisions
665
except _StartNotLinearAncestor:
666
# A merge was never detected so the lower revision limit can't
667
# be nested down somewhere
668
raise errors.BzrCommandError(gettext('Start revision not found in'
669
' history of end revision.'))
671
# We exit the loop above because we encounter a revision with merges, from
672
# this revision, we need to switch to _graph_view_revisions.
674
# A log including nested merges is required. If the direction is reverse,
675
# we rebase the initial merge depths so that the development line is
676
# shown naturally, i.e. just like it is for linear logging. We can easily
677
# make forward the exact opposite display, but showing the merge revisions
678
# indented at the end seems slightly nicer in that case.
679
view_revisions = itertools.chain(iter(initial_revisions),
680
_graph_view_revisions(branch, start_rev_id, end_rev_id,
681
rebase_initial_depths=(
682
direction == 'reverse'),
683
exclude_common_ancestry=exclude_common_ancestry))
684
return view_revisions
687
def _has_merges(branch, rev_id):
688
"""Does a revision have multiple parents or not?"""
689
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
690
return len(parents) > 1
693
def _compute_revno_str(branch, rev_id):
694
"""Compute the revno string from a rev_id.
696
:return: The revno string, or None if the revision is not in the supplied
700
revno = branch.revision_id_to_dotted_revno(rev_id)
701
except errors.NoSuchRevision:
702
# The revision must be outside of this branch
705
return '.'.join(str(n) for n in revno)
708
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
709
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
710
if start_rev_id and end_rev_id:
712
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
713
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
714
except errors.NoSuchRevision:
715
# one or both is not in the branch; not obvious
717
if len(start_dotted) == 1 and len(end_dotted) == 1:
719
return start_dotted[0] <= end_dotted[0]
720
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
721
start_dotted[0:1] == end_dotted[0:1]):
722
# both on same development line
723
return start_dotted[2] <= end_dotted[2]
727
# if either start or end is not specified then we use either the first or
728
# the last revision and *they* are obvious ancestors.
732
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
733
exclude_common_ancestry=False):
734
"""Calculate a sequence of revisions to view, newest to oldest.
736
:param start_rev_id: the lower revision-id
737
:param end_rev_id: the upper revision-id
738
:param exclude_common_ancestry: Whether the start_rev_id should be part of
739
the iterated revisions.
740
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
741
dotted_revno will be None for ghosts
742
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
743
is not found walking the left-hand history
745
repo = branch.repository
746
graph = repo.get_graph()
747
if start_rev_id is None and end_rev_id is None:
749
br_revno, br_rev_id = branch.last_revision_info()
750
except errors.GhostRevisionsHaveNoRevno:
751
br_rev_id = branch.last_revision()
755
graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
756
(_mod_revision.NULL_REVISION,))
759
revision_id = next(graph_iter)
760
except errors.RevisionNotPresent as e:
762
yield e.revision_id, None, None
764
except StopIteration:
767
yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
768
if cur_revno is not None:
771
br_rev_id = branch.last_revision()
772
if end_rev_id is None:
773
end_rev_id = br_rev_id
774
found_start = start_rev_id is None
775
graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
776
(_mod_revision.NULL_REVISION,))
779
revision_id = next(graph_iter)
780
except StopIteration:
782
except errors.RevisionNotPresent as e:
784
yield e.revision_id, None, None
787
revno_str = _compute_revno_str(branch, revision_id)
788
if not found_start and revision_id == start_rev_id:
789
if not exclude_common_ancestry:
790
yield revision_id, revno_str, 0
794
yield revision_id, revno_str, 0
796
raise _StartNotLinearAncestor()
799
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
800
rebase_initial_depths=True,
801
exclude_common_ancestry=False):
802
"""Calculate revisions to view including merges, newest to oldest.
804
:param branch: the branch
805
:param start_rev_id: the lower revision-id
806
:param end_rev_id: the upper revision-id
807
:param rebase_initial_depth: should depths be rebased until a mainline
809
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
811
if exclude_common_ancestry:
812
stop_rule = 'with-merges-without-common-ancestry'
814
stop_rule = 'with-merges'
815
view_revisions = branch.iter_merge_sorted_revisions(
816
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
818
if not rebase_initial_depths:
819
for (rev_id, merge_depth, revno, end_of_merge
821
yield rev_id, '.'.join(map(str, revno)), merge_depth
823
# We're following a development line starting at a merged revision.
824
# We need to adjust depths down by the initial depth until we find
825
# a depth less than it. Then we use that depth as the adjustment.
826
# If and when we reach the mainline, depth adjustment ends.
827
depth_adjustment = None
828
for (rev_id, merge_depth, revno, end_of_merge
830
if depth_adjustment is None:
831
depth_adjustment = merge_depth
833
if merge_depth < depth_adjustment:
834
# From now on we reduce the depth adjustement, this can be
835
# surprising for users. The alternative requires two passes
836
# which breaks the fast display of the first revision
838
depth_adjustment = merge_depth
839
merge_depth -= depth_adjustment
840
yield rev_id, '.'.join(map(str, revno)), merge_depth
843
def _rebase_merge_depth(view_revisions):
844
"""Adjust depths upwards so the top level is 0."""
845
# If either the first or last revision have a merge_depth of 0, we're done
846
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
847
min_depth = min([d for r, n, d in view_revisions])
849
view_revisions = [(r, n, d - min_depth)
850
for r, n, d in view_revisions]
851
return view_revisions
854
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
855
file_ids=None, direction='reverse'):
856
"""Create a revision iterator for log.
858
:param branch: The branch being logged.
859
:param view_revisions: The revisions being viewed.
860
:param generate_delta: Whether to generate a delta for each revision.
861
Permitted values are None, 'full' and 'partial'.
862
:param search: A user text search string.
863
:param file_ids: If non empty, only revisions matching one or more of
864
the file-ids are to be kept.
865
:param direction: the direction in which view_revisions is sorted
866
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
869
# Convert view_revisions into (view, None, None) groups to fit with
870
# the standard interface here.
871
if isinstance(view_revisions, list):
872
# A single batch conversion is faster than many incremental ones.
873
# As we have all the data, do a batch conversion.
874
nones = [None] * len(view_revisions)
875
log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
878
for view in view_revisions:
879
yield (view, None, None)
880
log_rev_iterator = iter([_convert()])
881
for adapter in log_adapters:
882
# It would be nicer if log adapters were first class objects
883
# with custom parameters. This will do for now. IGC 20090127
884
if adapter == _make_delta_filter:
885
log_rev_iterator = adapter(
886
branch, generate_delta, search, log_rev_iterator, file_ids,
889
log_rev_iterator = adapter(
890
branch, generate_delta, search, log_rev_iterator)
891
return log_rev_iterator
894
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
895
"""Create a filtered iterator of log_rev_iterator matching on a regex.
897
:param branch: The branch being logged.
898
:param generate_delta: Whether to generate a delta for each revision.
899
:param match: A dictionary with properties as keys and lists of strings
900
as values. To match, a revision may match any of the supplied strings
901
within a single property but must match at least one string for each
903
:param log_rev_iterator: An input iterator containing all revisions that
904
could be displayed, in lists.
905
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
909
return log_rev_iterator
910
searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
911
for k, v in match.items()]
912
return _filter_re(searchRE, log_rev_iterator)
915
def _filter_re(searchRE, log_rev_iterator):
916
for revs in log_rev_iterator:
917
new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
922
def _match_filter(searchRE, rev):
924
'message': (rev.message,),
925
'committer': (rev.committer,),
926
'author': (rev.get_apparent_authors()),
927
'bugs': list(rev.iter_bugs())
929
strings[''] = [item for inner_list in strings.values()
930
for item in inner_list]
931
for (k, v) in searchRE:
932
if k in strings and not _match_any_filter(strings[k], v):
937
def _match_any_filter(strings, res):
938
return any(re.search(s) for re in res for s in strings)
941
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
942
fileids=None, direction='reverse'):
943
"""Add revision deltas to a log iterator if needed.
945
:param branch: The branch being logged.
946
:param generate_delta: Whether to generate a delta for each revision.
947
Permitted values are None, 'full' and 'partial'.
948
:param search: A user text search string.
949
:param log_rev_iterator: An input iterator containing all revisions that
950
could be displayed, in lists.
951
:param fileids: If non empty, only revisions matching one or more of
952
the file-ids are to be kept.
953
:param direction: the direction in which view_revisions is sorted
954
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
957
if not generate_delta and not fileids:
958
return log_rev_iterator
959
return _generate_deltas(branch.repository, log_rev_iterator,
960
generate_delta, fileids, direction)
963
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
965
"""Create deltas for each batch of revisions in log_rev_iterator.
967
If we're only generating deltas for the sake of filtering against
968
file-ids, we stop generating deltas once all file-ids reach the
969
appropriate life-cycle point. If we're receiving data newest to
970
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
972
check_fileids = fileids is not None and len(fileids) > 0
974
fileid_set = set(fileids)
975
if direction == 'reverse':
981
for revs in log_rev_iterator:
982
# If we were matching against fileids and we've run out,
983
# there's nothing left to do
984
if check_fileids and not fileid_set:
986
revisions = [rev[1] for rev in revs]
988
if delta_type == 'full' and not check_fileids:
989
deltas = repository.get_deltas_for_revisions(revisions)
990
for rev, delta in zip(revs, deltas):
991
new_revs.append((rev[0], rev[1], delta))
993
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
994
for rev, delta in zip(revs, deltas):
996
if delta is None or not delta.has_changed():
999
_update_fileids(delta, fileid_set, stop_on)
1000
if delta_type is None:
1002
elif delta_type == 'full':
1003
# If the file matches all the time, rebuilding
1004
# a full delta like this in addition to a partial
1005
# one could be slow. However, it's likely that
1006
# most revisions won't get this far, making it
1007
# faster to filter on the partial deltas and
1008
# build the occasional full delta than always
1009
# building full deltas and filtering those.
1011
delta = repository.get_revision_delta(rev_id)
1012
new_revs.append((rev[0], rev[1], delta))
1016
def _update_fileids(delta, fileids, stop_on):
1017
"""Update the set of file-ids to search based on file lifecycle events.
1019
:param fileids: a set of fileids to update
1020
:param stop_on: either 'add' or 'remove' - take file-ids out of the
1021
fileids set once their add or remove entry is detected respectively
1023
if stop_on == 'add':
1024
for item in delta.added:
1025
if item[1] in fileids:
1026
fileids.remove(item[1])
1027
elif stop_on == 'delete':
1028
for item in delta.removed:
1029
if item[1] in fileids:
1030
fileids.remove(item[1])
1033
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
1034
"""Extract revision objects from the repository
1036
:param branch: The branch being logged.
1037
:param generate_delta: Whether to generate a delta for each revision.
1038
:param search: A user text search string.
1039
:param log_rev_iterator: An input iterator containing all revisions that
1040
could be displayed, in lists.
1041
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1044
repository = branch.repository
1045
for revs in log_rev_iterator:
1046
# r = revision_id, n = revno, d = merge depth
1047
revision_ids = [view[0] for view, _, _ in revs]
1048
revisions = dict(repository.iter_revisions(revision_ids))
1049
yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
1052
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
1053
"""Group up a single large batch into smaller ones.
1055
:param branch: The branch being logged.
1056
:param generate_delta: Whether to generate a delta for each revision.
1057
:param search: A user text search string.
1058
:param log_rev_iterator: An input iterator containing all revisions that
1059
could be displayed, in lists.
1060
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1064
for batch in log_rev_iterator:
1067
step = [detail for _, detail in zip(range(num), batch)]
1071
num = min(int(num * 1.5), 200)
1074
def _get_revision_limits(branch, start_revision, end_revision):
1075
"""Get and check revision limits.
1077
:param branch: The branch containing the revisions.
1079
:param start_revision: The first revision to be logged.
1080
but for merge revision support a RevisionInfo is expected.
1082
:param end_revision: The last revision to be logged.
1083
For backwards compatibility this may be a mainline integer revno,
1084
but for merge revision support a RevisionInfo is expected.
1086
:return: (start_rev_id, end_rev_id) tuple.
1090
if start_revision is not None:
1091
if not isinstance(start_revision, revisionspec.RevisionInfo):
1092
raise TypeError(start_revision)
1093
start_rev_id = start_revision.rev_id
1094
start_revno = start_revision.revno
1095
if start_revno is None:
1100
if end_revision is not None:
1101
if not isinstance(end_revision, revisionspec.RevisionInfo):
1102
raise TypeError(start_revision)
1103
end_rev_id = end_revision.rev_id
1104
end_revno = end_revision.revno
1105
if end_revno is None:
1107
end_revno = branch.revno()
1108
except errors.GhostRevisionsHaveNoRevno:
1111
if branch.last_revision() != _mod_revision.NULL_REVISION:
1112
if (start_rev_id == _mod_revision.NULL_REVISION
1113
or end_rev_id == _mod_revision.NULL_REVISION):
1114
raise errors.BzrCommandError(
1115
gettext('Logging revision 0 is invalid.'))
1116
if end_revno is not None and start_revno > end_revno:
1117
raise errors.BzrCommandError(
1118
gettext("Start revision must be older than the end revision."))
1119
return (start_rev_id, end_rev_id)
1122
def _get_mainline_revs(branch, start_revision, end_revision):
1123
"""Get the mainline revisions from the branch.
1125
Generates the list of mainline revisions for the branch.
1127
:param branch: The branch containing the revisions.
1129
:param start_revision: The first revision to be logged.
1130
For backwards compatibility this may be a mainline integer revno,
1131
but for merge revision support a RevisionInfo is expected.
1133
:param end_revision: The last revision to be logged.
1134
For backwards compatibility this may be a mainline integer revno,
1135
but for merge revision support a RevisionInfo is expected.
1137
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
1139
branch_revno, branch_last_revision = branch.last_revision_info()
1140
if branch_revno == 0:
1141
return None, None, None, None
1143
# For mainline generation, map start_revision and end_revision to
1144
# mainline revnos. If the revision is not on the mainline choose the
1145
# appropriate extreme of the mainline instead - the extra will be
1147
# Also map the revisions to rev_ids, to be used in the later filtering
1150
if start_revision is None:
1153
if isinstance(start_revision, revisionspec.RevisionInfo):
1154
start_rev_id = start_revision.rev_id
1155
start_revno = start_revision.revno or 1
1157
branch.check_real_revno(start_revision)
1158
start_revno = start_revision
1161
if end_revision is None:
1162
end_revno = branch_revno
1164
if isinstance(end_revision, revisionspec.RevisionInfo):
1165
end_rev_id = end_revision.rev_id
1166
end_revno = end_revision.revno or branch_revno
1168
branch.check_real_revno(end_revision)
1169
end_revno = end_revision
1171
if ((start_rev_id == _mod_revision.NULL_REVISION)
1172
or (end_rev_id == _mod_revision.NULL_REVISION)):
1173
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1174
if start_revno > end_revno:
1175
raise errors.BzrCommandError(gettext("Start revision must be older "
1176
"than the end revision."))
1178
if end_revno < start_revno:
1179
return None, None, None, None
1180
cur_revno = branch_revno
1183
graph = branch.repository.get_graph()
1184
for revision_id in graph.iter_lefthand_ancestry(
1185
branch_last_revision, (_mod_revision.NULL_REVISION,)):
1186
if cur_revno < start_revno:
1187
# We have gone far enough, but we always add 1 more revision
1188
rev_nos[revision_id] = cur_revno
1189
mainline_revs.append(revision_id)
1191
if cur_revno <= end_revno:
1192
rev_nos[revision_id] = cur_revno
1193
mainline_revs.append(revision_id)
1196
# We walked off the edge of all revisions, so we add a 'None' marker
1197
mainline_revs.append(None)
1199
mainline_revs.reverse()
1201
# override the mainline to look like the revision history.
1202
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1205
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1206
include_merges=True):
1207
r"""Return the list of revision ids which touch a given file id.
1209
The function filters view_revisions and returns a subset.
1210
This includes the revisions which directly change the file id,
1211
and the revisions which merge these changes. So if the
1224
And 'C' changes a file, then both C and D will be returned. F will not be
1225
returned even though it brings the changes to C into the branch starting
1226
with E. (Note that if we were using F as the tip instead of G, then we
1229
This will also be restricted based on a subset of the mainline.
1231
:param branch: The branch where we can get text revision information.
1233
:param file_id: Filter out revisions that do not touch file_id.
1235
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1236
tuples. This is the list of revisions which will be filtered. It is
1237
assumed that view_revisions is in merge_sort order (i.e. newest
1240
:param include_merges: include merge revisions in the result or not
1242
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1244
# Lookup all possible text keys to determine which ones actually modified
1246
graph = branch.repository.get_file_graph()
1247
get_parent_map = graph.get_parent_map
1248
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1250
# Looking up keys in batches of 1000 can cut the time in half, as well as
1251
# memory consumption. GraphIndex *does* like to look for a few keys in
1252
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1253
# TODO: This code needs to be re-evaluated periodically as we tune the
1254
# indexing layer. We might consider passing in hints as to the known
1255
# access pattern (sparse/clustered, high success rate/low success
1256
# rate). This particular access is clustered with a low success rate.
1257
modified_text_revisions = set()
1259
for start in range(0, len(text_keys), chunk_size):
1260
next_keys = text_keys[start:start + chunk_size]
1261
# Only keep the revision_id portion of the key
1262
modified_text_revisions.update(
1263
[k[1] for k in get_parent_map(next_keys)])
1264
del text_keys, next_keys
1267
# Track what revisions will merge the current revision, replace entries
1268
# with 'None' when they have been added to result
1269
current_merge_stack = [None]
1270
for info in view_revisions:
1271
rev_id, revno, depth = info
1272
if depth == len(current_merge_stack):
1273
current_merge_stack.append(info)
1275
del current_merge_stack[depth + 1:]
1276
current_merge_stack[-1] = info
1278
if rev_id in modified_text_revisions:
1279
# This needs to be logged, along with the extra revisions
1280
for idx in range(len(current_merge_stack)):
1281
node = current_merge_stack[idx]
1282
if node is not None:
1283
if include_merges or node[2] == 0:
1285
current_merge_stack[idx] = None
1289
def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth.  There may be no topological justification for
    this, but it looks much nicer.

    :param merge_sorted_revisions: a list of (rev_id, revno, depth) tuples
        in merge-sort order (newest first).
    :param _depth: internal recursion parameter; callers use the default.
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None, signature=None):
        self.rev = rev
        # revno is kept as a string (it may be a dotted revno such as
        # '1.2.3'); None is preserved to mean "no revno available".
        if revno is None:
            self.revno = None
        else:
            self.revno = str(revno)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff
        self.signature = signature
class LogFormatter(object):
1343
"""Abstract class to display log messages.
1345
At a minimum, a derived class must implement the log_revision method.
1347
If the LogFormatter needs to be informed of the beginning or end of
1348
a log it should implement the begin_log and/or end_log hook methods.
1350
A LogFormatter should define the following supports_XXX flags
1351
to indicate which LogRevision attributes it supports:
1353
- supports_delta must be True if this log formatter supports delta.
1354
Otherwise the delta attribute may not be populated. The 'delta_format'
1355
attribute describes whether the 'short_status' format (1) or the long
1356
one (2) should be used.
1358
- supports_merge_revisions must be True if this log formatter supports
1359
merge revisions. If not, then only mainline revisions will be passed
1362
- preferred_levels is the number of levels this formatter defaults to.
1363
The default value is zero meaning display all levels.
1364
This value is only relevant if supports_merge_revisions is True.
1366
- supports_tags must be True if this log formatter supports tags.
1367
Otherwise the tags attribute may not be populated.
1369
- supports_diff must be True if this log formatter supports diffs.
1370
Otherwise the diff attribute may not be populated.
1372
- supports_signatures must be True if this log formatter supports GPG
1375
Plugins can register functions to show custom revision properties using
1376
the properties_handler_registry. The registered function
1377
must respect the following interface description::
1379
def my_show_properties(properties_dict):
1380
# code that returns a dict {'name':'value'} of the properties
1383
preferred_levels = 0
1385
def __init__(self, to_file, show_ids=False, show_timezone='original',
1386
delta_format=None, levels=None, show_advice=False,
1387
to_exact_file=None, author_list_handler=None):
1388
"""Create a LogFormatter.
1390
:param to_file: the file to output to
1391
:param to_exact_file: if set, gives an output stream to which
1392
non-Unicode diffs are written.
1393
:param show_ids: if True, revision-ids are to be displayed
1394
:param show_timezone: the timezone to use
1395
:param delta_format: the level of delta information to display
1396
or None to leave it to the formatter to decide
1397
:param levels: the number of levels to display; None or -1 to
1398
let the log formatter decide.
1399
:param show_advice: whether to show advice at the end of the
1401
:param author_list_handler: callable generating a list of
1402
authors to display for a given revision
1404
self.to_file = to_file
1405
# 'exact' stream used to show diff, it should print content 'as is'
1406
# and should not try to decode/encode it to unicode to avoid bug
1408
if to_exact_file is not None:
1409
self.to_exact_file = to_exact_file
1411
# XXX: somewhat hacky; this assumes it's a codec writer; it's
1412
# better for code that expects to get diffs to pass in the exact
1414
self.to_exact_file = getattr(to_file, 'stream', to_file)
1415
self.show_ids = show_ids
1416
self.show_timezone = show_timezone
1417
if delta_format is None:
1418
# Ensures backward compatibility
1419
delta_format = 2 # long format
1420
self.delta_format = delta_format
1421
self.levels = levels
1422
self._show_advice = show_advice
1423
self._merge_count = 0
1424
self._author_list_handler = author_list_handler
1426
def get_levels(self):
1427
"""Get the number of levels to display or 0 for all."""
1428
if getattr(self, 'supports_merge_revisions', False):
1429
if self.levels is None or self.levels == -1:
1430
self.levels = self.preferred_levels
1435
def log_revision(self, revision):
1438
:param revision: The LogRevision to be logged.
1440
raise NotImplementedError('not implemented in abstract base')
1442
def show_advice(self):
1443
"""Output user advice, if any, when the log is completed."""
1444
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1445
advice_sep = self.get_advice_separator()
1447
self.to_file.write(advice_sep)
1449
"Use --include-merged or -n0 to see merged revisions.\n")
1451
def get_advice_separator(self):
1452
"""Get the text separating the log from the closing advice."""
1455
def short_committer(self, rev):
1456
name, address = config.parse_username(rev.committer)
1461
def short_author(self, rev):
1462
return self.authors(rev, 'first', short=True, sep=', ')
1464
def authors(self, rev, who, short=False, sep=None):
1465
"""Generate list of authors, taking --authors option into account.
1467
The caller has to specify the name of a author list handler,
1468
as provided by the author list registry, using the ``who``
1469
argument. That name only sets a default, though: when the
1470
user selected a different author list generation using the
1471
``--authors`` command line switch, as represented by the
1472
``author_list_handler`` constructor argument, that value takes
1475
:param rev: The revision for which to generate the list of authors.
1476
:param who: Name of the default handler.
1477
:param short: Whether to shorten names to either name or address.
1478
:param sep: What separator to use for automatic concatenation.
1480
if self._author_list_handler is not None:
1481
# The user did specify --authors, which overrides the default
1482
author_list_handler = self._author_list_handler
1484
# The user didn't specify --authors, so we use the caller's default
1485
author_list_handler = author_list_registry.get(who)
1486
names = author_list_handler(rev)
1488
for i in range(len(names)):
1489
name, address = config.parse_username(names[i])
1495
names = sep.join(names)
1498
def merge_marker(self, revision):
1499
"""Get the merge marker to include in the output or '' if none."""
1500
if len(revision.rev.parent_ids) > 1:
1501
self._merge_count += 1
1506
def show_properties(self, revision, indent):
1507
"""Displays the custom properties returned by each registered handler.
1509
If a registered handler raises an error it is propagated.
1511
for line in self.custom_properties(revision):
1512
self.to_file.write("%s%s\n" % (indent, line))
1514
def custom_properties(self, revision):
1515
"""Format the custom properties returned by each registered handler.
1517
If a registered handler raises an error it is propagated.
1519
:return: a list of formatted lines (excluding trailing newlines)
1521
lines = self._foreign_info_properties(revision)
1522
for key, handler in properties_handler_registry.iteritems():
1523
lines.extend(self._format_properties(handler(revision)))
1526
def _foreign_info_properties(self, rev):
1527
"""Custom log displayer for foreign revision identifiers.
1529
:param rev: Revision object.
1531
# Revision comes directly from a foreign repository
1532
if isinstance(rev, foreign.ForeignRevision):
1533
return self._format_properties(
1534
rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))
1536
# Imported foreign revision revision ids always contain :
1537
if b":" not in rev.revision_id:
1540
# Revision was once imported from a foreign repository
1542
foreign_revid, mapping = \
1543
foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
1544
except errors.InvalidRevisionId:
1547
return self._format_properties(
1548
mapping.vcs.show_foreign_revid(foreign_revid))
1550
def _format_properties(self, properties):
1552
for key, value in properties.items():
1553
lines.append(key + ': ' + value)
1556
def show_diff(self, to_file, diff, indent):
1557
encoding = get_terminal_encoding()
1558
for l in diff.rstrip().split(b'\n'):
1559
to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
1562
# Separator between revisions in long format
1563
_LONG_SEP = '-' * 60
1566
class LongLogFormatter(LogFormatter):
1568
supports_merge_revisions = True
1569
preferred_levels = 1
1570
supports_delta = True
1571
supports_tags = True
1572
supports_diff = True
1573
supports_signatures = True
1575
def __init__(self, *args, **kwargs):
1576
super(LongLogFormatter, self).__init__(*args, **kwargs)
1577
if self.show_timezone == 'original':
1578
self.date_string = self._date_string_original_timezone
1580
self.date_string = self._date_string_with_timezone
1582
def _date_string_with_timezone(self, rev):
1583
return format_date(rev.timestamp, rev.timezone or 0,
1586
def _date_string_original_timezone(self, rev):
1587
return format_date_with_offset_in_original_timezone(rev.timestamp,
1590
def log_revision(self, revision):
1591
"""Log a revision, either merged or not."""
1592
indent = ' ' * revision.merge_depth
1594
if revision.revno is not None:
1595
lines.append('revno: %s%s' % (revision.revno,
1596
self.merge_marker(revision)))
1598
lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
1599
if self.show_ids or revision.revno is None:
1600
lines.append('revision-id: %s' %
1601
(revision.rev.revision_id.decode('utf-8'),))
1603
for parent_id in revision.rev.parent_ids:
1604
lines.append('parent: %s' % (parent_id.decode('utf-8'),))
1605
lines.extend(self.custom_properties(revision.rev))
1607
committer = revision.rev.committer
1608
authors = self.authors(revision.rev, 'all')
1609
if authors != [committer]:
1610
lines.append('author: %s' % (", ".join(authors),))
1611
lines.append('committer: %s' % (committer,))
1613
branch_nick = revision.rev.properties.get('branch-nick', None)
1614
if branch_nick is not None:
1615
lines.append('branch nick: %s' % (branch_nick,))
1617
lines.append('timestamp: %s' % (self.date_string(revision.rev),))
1619
if revision.signature is not None:
1620
lines.append('signature: ' + revision.signature)
1622
lines.append('message:')
1623
if not revision.rev.message:
1624
lines.append(' (no message)')
1626
message = revision.rev.message.rstrip('\r\n')
1627
for l in message.split('\n'):
1628
lines.append(' %s' % (l,))
1630
# Dump the output, appending the delta and diff if requested
1631
to_file = self.to_file
1632
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1633
if revision.delta is not None:
1634
# Use the standard status output to display changes
1635
from breezy.delta import report_delta
1636
report_delta(to_file, revision.delta, short_status=False,
1637
show_ids=self.show_ids, indent=indent)
1638
if revision.diff is not None:
1639
to_file.write(indent + 'diff:\n')
1641
# Note: we explicitly don't indent the diff (relative to the
1642
# revision information) so that the output can be fed to patch -p0
1643
self.show_diff(self.to_exact_file, revision.diff, indent)
1644
self.to_exact_file.flush()
1646
def get_advice_separator(self):
1647
"""Get the text separating the log from the closing advice."""
1648
return '-' * 60 + '\n'
1651
class ShortLogFormatter(LogFormatter):
1653
supports_merge_revisions = True
1654
preferred_levels = 1
1655
supports_delta = True
1656
supports_tags = True
1657
supports_diff = True
1659
def __init__(self, *args, **kwargs):
1660
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1661
self.revno_width_by_depth = {}
1663
def log_revision(self, revision):
1664
# We need two indents: one per depth and one for the information
1665
# relative to that indent. Most mainline revnos are 5 chars or
1666
# less while dotted revnos are typically 11 chars or less. Once
1667
# calculated, we need to remember the offset for a given depth
1668
# as we might be starting from a dotted revno in the first column
1669
# and we want subsequent mainline revisions to line up.
1670
depth = revision.merge_depth
1671
indent = ' ' * depth
1672
revno_width = self.revno_width_by_depth.get(depth)
1673
if revno_width is None:
1674
if revision.revno is None or revision.revno.find('.') == -1:
1675
# mainline revno, e.g. 12345
1678
# dotted revno, e.g. 12345.10.55
1680
self.revno_width_by_depth[depth] = revno_width
1681
offset = ' ' * (revno_width + 1)
1683
to_file = self.to_file
1686
tags = ' {%s}' % (', '.join(sorted(revision.tags)))
1687
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1688
revision.revno or "", self.short_author(
1690
format_date(revision.rev.timestamp,
1691
revision.rev.timezone or 0,
1692
self.show_timezone, date_fmt="%Y-%m-%d",
1694
tags, self.merge_marker(revision)))
1695
self.show_properties(revision.rev, indent + offset)
1696
if self.show_ids or revision.revno is None:
1697
to_file.write(indent + offset + 'revision-id:%s\n'
1698
% (revision.rev.revision_id.decode('utf-8'),))
1699
if not revision.rev.message:
1700
to_file.write(indent + offset + '(no message)\n')
1702
message = revision.rev.message.rstrip('\r\n')
1703
for l in message.split('\n'):
1704
to_file.write(indent + offset + '%s\n' % (l,))
1706
if revision.delta is not None:
1707
# Use the standard status output to display changes
1708
from breezy.delta import report_delta
1709
report_delta(to_file, revision.delta,
1710
short_status=self.delta_format == 1,
1711
show_ids=self.show_ids, indent=indent + offset)
1712
if revision.diff is not None:
1713
self.show_diff(self.to_exact_file, revision.diff, ' ')
1717
class LineLogFormatter(LogFormatter):
1719
supports_merge_revisions = True
1720
preferred_levels = 1
1721
supports_tags = True
1723
def __init__(self, *args, **kwargs):
1724
super(LineLogFormatter, self).__init__(*args, **kwargs)
1725
width = terminal_width()
1726
if width is not None:
1727
# we need one extra space for terminals that wrap on last char
1729
self._max_chars = width
1731
def truncate(self, str, max_len):
1732
if max_len is None or len(str) <= max_len:
1734
return str[:max_len - 3] + '...'
1736
def date_string(self, rev):
1737
return format_date(rev.timestamp, rev.timezone or 0,
1738
self.show_timezone, date_fmt="%Y-%m-%d",
1741
def message(self, rev):
1743
return '(no message)'
1747
def log_revision(self, revision):
1748
indent = ' ' * revision.merge_depth
1749
self.to_file.write(self.log_string(revision.revno, revision.rev,
1750
self._max_chars, revision.tags, indent))
1751
self.to_file.write('\n')
1753
def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
1754
"""Format log info into one string. Truncate tail of string
1756
:param revno: revision number or None.
1757
Revision numbers counts from 1.
1758
:param rev: revision object
1759
:param max_chars: maximum length of resulting string
1760
:param tags: list of tags or None
1761
:param prefix: string to prefix each line
1762
:return: formatted truncated string
1766
# show revno only when is not None
1767
out.append("%s:" % revno)
1768
if max_chars is not None:
1769
out.append(self.truncate(
1770
self.short_author(rev), (max_chars + 3) // 4))
1772
out.append(self.short_author(rev))
1773
out.append(self.date_string(rev))
1774
if len(rev.parent_ids) > 1:
1775
out.append('[merge]')
1777
tag_str = '{%s}' % (', '.join(sorted(tags)))
1779
out.append(rev.get_summary())
1780
return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
1783
class GnuChangelogLogFormatter(LogFormatter):
1785
supports_merge_revisions = True
1786
supports_delta = True
1788
def log_revision(self, revision):
1789
"""Log a revision, either merged or not."""
1790
to_file = self.to_file
1792
date_str = format_date(revision.rev.timestamp,
1793
revision.rev.timezone or 0,
1795
date_fmt='%Y-%m-%d',
1797
committer_str = self.authors(revision.rev, 'first', sep=', ')
1798
committer_str = committer_str.replace(' <', ' <')
1799
to_file.write('%s %s\n\n' % (date_str, committer_str))
1801
if revision.delta is not None and revision.delta.has_changed():
1802
for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
1804
to_file.write('\t* %s:\n' % (path,))
1805
for c in revision.delta.renamed:
1806
oldpath, newpath = c[:2]
1807
# For renamed files, show both the old and the new path
1808
to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
1811
if not revision.rev.message:
1812
to_file.write('\tNo commit message\n')
1814
message = revision.rev.message.rstrip('\r\n')
1815
for l in message.split('\n'):
1816
to_file.write('\t%s\n' % (l.lstrip(),))
1820
def line_log(rev, max_chars):
    """Format *rev* as a single log line of at most *max_chars* characters."""
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)
class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct.  'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        """Return the formatter class selected by the branch configuration."""
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))
log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register(
    'short', ShortLogFormatter, 'Moderately short log format.')
log_formatter_registry.register(
    'long', LongLogFormatter, 'Detailed log format.')
log_formatter_registry.register(
    'line', LineLogFormatter, 'Log format with one line per revision.')
log_formatter_registry.register(
    'gnu-changelog', GnuChangelogLogFormatter,
    'Format used by GNU ChangeLog files.')
def register_formatter(name, formatter):
    """Register *formatter* under *name* in the global formatter registry."""
    log_formatter_registry.register(name, formatter)
def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        # Unknown names surface as a user-facing command error, not a
        # KeyError traceback.
        raise errors.BzrCommandError(
            gettext("unknown log formatter: %r") % name)
def author_list_all(rev):
    """Return a new list of all apparent authors of *rev*."""
    return rev.get_apparent_authors()[:]
def author_list_first(rev):
    """Return the first apparent author of *rev* as a list, or [] if none."""
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        # No apparent authors at all.
        return []
def author_list_committer(rev):
    """Return a single-element list containing the committer of *rev*."""
    return [rev.committer]
author_list_registry = registry.Registry()
1889
author_list_registry.register('all', author_list_all,
1892
author_list_registry.register('first', author_list_first,
1895
author_list_registry.register('committer', author_list_committer,
1899
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
1901
"""Show the change in revision history comparing the old revision history to the new one.
1903
:param branch: The branch where the revisions exist
1904
:param old_rh: The old revision history
1905
:param new_rh: The new revision history
1906
:param to_file: A file to write the results to. If None, stdout will be used
1909
to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
1911
lf = log_formatter(log_format,
1914
show_timezone='original')
1916
# This is the first index which is different between
1919
for i in range(max(len(new_rh), len(old_rh))):
1920
if (len(new_rh) <= i
1922
or new_rh[i] != old_rh[i]):
1926
if base_idx is None:
1927
to_file.write('Nothing seems to have changed\n')
1929
# TODO: It might be nice to do something like show_log
1930
# and show the merged entries. But since this is the
1931
# removed revisions, it shouldn't be as important
1932
if base_idx < len(old_rh):
1933
to_file.write('*' * 60)
1934
to_file.write('\nRemoved Revisions:\n')
1935
for i in range(base_idx, len(old_rh)):
1936
rev = branch.repository.get_revision(old_rh[i])
1937
lr = LogRevision(rev, i + 1, 0, None)
1939
to_file.write('*' * 60)
1940
to_file.write('\n\n')
1941
if base_idx < len(new_rh):
1942
to_file.write('Added Revisions:\n')
1947
direction='forward',
1948
start_revision=base_idx + 1,
1949
end_revision=len(new_rh),
1953
def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.
    :return: (old_history, new_history) - the lefthand revisions unique to
        each side, oldest first.
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    # Walk both lefthand ancestries in lockstep until one side reaches a
    # revision already seen on the other side (the common base) or both
    # iterators are exhausted.
    while do_new or do_old:
        if do_new:
            try:
                new_revision = next(new_iter)
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = next(old_iter)
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        # Drop the common base and everything older than it.
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        # Revisions that were on the old tip but are no longer reachable.
        output.write('*' * 60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*' * 60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        # The added revisions occupy the top len(new_history) revnos.
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno)
2034
def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        # NOTE(review): revno is computed as i + last_revno, matching the
        # historical behaviour here; merge depth 0 and no revision tags.
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)
2048
def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        # No revision range given - look in the working/basis tree first.
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all - the path simply doesn't exist.
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # Not in the end tree - fall back to the start of the range.
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info
2143
def _get_kind_for_file_id(tree, path, file_id):
2144
"""Return the kind of a file-id or None if it doesn't exist."""
2145
if file_id is not None:
2146
return tree.kind(path, file_id)
2151
# Registry of callables that extract extra display properties from a
# revision; log formatters consult it when rendering a revision.
properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available
2156
def _bugs_properties_handler(revision):
    """Extract bug URLs recorded on a revision for display.

    :param revision: A revision whose ``properties`` dict may contain a
        'bugs' entry of newline-separated "URL status" pairs.
    :return: A dict mapping a display label to a space-separated string
        of bug URLs; empty if the revision records no bugs.
    """
    ret = {}
    if 'bugs' in revision.properties:
        bug_lines = revision.properties['bugs'].split('\n')
        # Each line is "<url> <status>"; malformed lines (no status) are
        # skipped by the len(row) > 1 checks below.
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']
        related_bug_urls = [row[0] for row in bug_rows if
                            len(row) > 1 and row[1] == 'related']
        if fixed_bug_urls:
            text = ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))
            ret[text] = ' '.join(fixed_bug_urls)
        if related_bug_urls:
            text = ngettext('related bug', 'related bugs',
                            len(related_bug_urls))
            ret[text] = ' '.join(related_bug_urls)
    return ret
2175
# Make the bug handler available to all log formatters by default.
properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
2179
# adapters which revision ids to log are filtered. When log is called, the
2180
# log_rev_iterator is adapted through each of these factory methods.
2181
# Plugins are welcome to mutate this list in any way they like - as long
2182
# as the overall behaviour is preserved. At this point there is no extensible
2183
# mechanism for getting parameters to each factory method, and until there is
2184
# this won't be considered a stable api.
2188
# read revision objects
2189
_make_revision_objects,
2190
# filter on log messages
2191
_make_search_filter,
2192
# generate deltas for things we will show