1
# Copyright (C) 2005-2011 Canonical Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
"""Code to show logs of changes.
19
Various flavors of log can be produced:
21
* for one file, or the whole tree, and (not done yet) for
22
files in a given directory
24
* in "verbose" mode with a description of what changed from one
27
* with file-ids and revision-ids shown
29
Logs are actually written out through an abstract LogFormatter
30
interface, which allows for different preferred formats. Plugins can
33
Logs can be produced in either forward (oldest->newest) or reverse
34
(newest->oldest) order.
36
Logs can be filtered to show only revisions matching a particular
37
search string, or within a particular range of revisions. The range
38
can be given as date/times, which are reduced to revisions before
41
In verbose mode we show a summary of what changed in each particular
42
revision. Note that this is the delta for changes in that revision
43
relative to its left-most parent, not the delta relative to the last
44
logged revision. So for example if you ask for a verbose log of
45
changes touching hello.c you will get a list of those revisions also
46
listing other things that were changed in the same revision, but not
47
all the changes since the previous revision that touched hello.c.
50
from __future__ import absolute_import
56
from warnings import (
60
from .lazy_import import lazy_import
61
lazy_import(globals(), """
69
revision as _mod_revision,
71
from breezy.i18n import gettext, ngettext
79
from .osutils import (
81
format_date_with_offset_in_original_timezone,
82
get_diff_header_encoding,
83
get_terminal_encoding,
91
from .tree import find_previous_path
94
def find_touching_revisions(repository, last_revision, last_tree, last_path):
95
"""Yield a description of revisions which affect the file_id.
97
Each returned element is (revno, revision_id, description)
99
This is the list of revisions where the file is either added,
100
modified, renamed or deleted.
102
TODO: Perhaps some way to limit this to only particular revisions,
103
or to traverse a non-mainline set of revisions?
105
last_verifier = last_tree.get_file_verifier(last_path)
106
graph = repository.get_graph()
107
history = list(graph.iter_lefthand_ancestry(last_revision, []))
109
for revision_id in history:
110
this_tree = repository.revision_tree(revision_id)
111
this_path = find_previous_path(last_tree, this_tree, last_path)
113
# now we know how it was last time, and how it is in this revision.
114
# are those two states effectively the same or not?
115
if this_path is not None and last_path is None:
116
yield revno, revision_id, "deleted " + this_path
117
this_verifier = this_tree.get_file_verifier(this_path)
118
elif this_path is None and last_path is not None:
119
yield revno, revision_id, "added " + last_path
120
elif this_path != last_path:
121
yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
122
this_verifier = this_tree.get_file_verifier(this_path)
124
this_verifier = this_tree.get_file_verifier(this_path)
125
if (this_verifier != last_verifier):
126
yield revno, revision_id, "modified " + this_path
128
last_verifier = this_verifier
129
last_path = this_path
130
last_tree = this_tree
131
if last_path is None:
138
specific_fileid=None,
147
"""Write out human-readable log of commits to this branch.
149
This function is being retained for backwards compatibility but
150
should not be extended with new parameters. Use the new Logger class
151
instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
152
make_log_request_dict function.
154
:param lf: The LogFormatter object showing the output.
156
:param specific_fileid: If not None, list only the commits affecting the
157
specified file, rather than all commits.
159
:param verbose: If True show added/changed/deleted/renamed files.
161
:param direction: 'reverse' (default) is latest to earliest; 'forward' is
164
:param start_revision: If not None, only show revisions >= start_revision
166
:param end_revision: If not None, only show revisions <= end_revision
168
:param search: If not None, only show revisions with matching commit
171
:param limit: If set, shows only 'limit' revisions, all revisions are shown
174
:param show_diff: If True, output a diff after each revision.
176
:param match: Dictionary of search lists to use when matching revision
179
# Convert old-style parameters to new-style parameters
180
if specific_fileid is not None:
181
file_ids = [specific_fileid]
186
delta_type = 'partial'
193
diff_type = 'partial'
199
if isinstance(start_revision, int):
201
start_revision = revisionspec.RevisionInfo(branch, start_revision)
202
except errors.NoSuchRevision:
203
raise errors.InvalidRevisionNumber(start_revision)
205
if isinstance(end_revision, int):
207
end_revision = revisionspec.RevisionInfo(branch, end_revision)
208
except errors.NoSuchRevision:
209
raise errors.InvalidRevisionNumber(end_revision)
211
if end_revision is not None and end_revision.revno == 0:
212
raise errors.InvalidRevisionNumber(end_revision.revno)
214
# Build the request and execute it
215
rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
216
start_revision=start_revision, end_revision=end_revision,
217
limit=limit, message_search=search,
218
delta_type=delta_type, diff_type=diff_type)
219
Logger(branch, rqst).show(lf)
222
# Note: This needs to be kept in sync with the defaults in
223
# make_log_request_dict() below
224
_DEFAULT_REQUEST_PARAMS = {
225
'direction': 'reverse',
227
'generate_tags': True,
228
'exclude_common_ancestry': False,
229
'_match_using_deltas': True,
233
def make_log_request_dict(direction='reverse', specific_fileids=None,
234
start_revision=None, end_revision=None, limit=None,
235
message_search=None, levels=None, generate_tags=True,
237
diff_type=None, _match_using_deltas=True,
238
exclude_common_ancestry=False, match=None,
239
signature=False, omit_merges=False,
241
"""Convenience function for making a logging request dictionary.
243
Using this function may make code slightly safer by ensuring
244
parameters have the correct names. It also provides a reference
245
point for documenting the supported parameters.
247
:param direction: 'reverse' (default) is latest to earliest;
248
'forward' is earliest to latest.
250
:param specific_fileids: If not None, only include revisions
251
affecting the specified files, rather than all revisions.
253
:param start_revision: If not None, only generate
254
revisions >= start_revision
256
:param end_revision: If not None, only generate
257
revisions <= end_revision
259
:param limit: If set, generate only 'limit' revisions, all revisions
260
are shown if None or 0.
262
:param message_search: If not None, only include revisions with
263
matching commit messages
265
:param levels: the number of levels of revisions to
266
generate; 1 for just the mainline; 0 for all levels, or None for
269
:param generate_tags: If True, include tags for matched revisions.
271
:param delta_type: Either 'full', 'partial' or None.
272
'full' means generate the complete delta - adds/deletes/modifies/etc;
273
'partial' means filter the delta using specific_fileids;
274
None means do not generate any delta.
276
:param diff_type: Either 'full', 'partial' or None.
277
'full' means generate the complete diff - adds/deletes/modifies/etc;
278
'partial' means filter the diff using specific_fileids;
279
None means do not generate any diff.
281
:param _match_using_deltas: a private parameter controlling the
282
algorithm used for matching specific_fileids. This parameter
283
may be removed in the future so breezy client code should NOT
286
:param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
287
range operator or as a graph difference.
289
:param signature: show digital signature information
291
:param match: Dictionary of list of search strings to use when filtering
292
revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
293
the empty string to match any of the preceding properties.
295
:param omit_merges: If True, commits with more than one parent are
299
# Take care of old style message_search parameter
302
if 'message' in match:
303
match['message'].append(message_search)
305
match['message'] = [message_search]
307
match = {'message': [message_search]}
309
'direction': direction,
310
'specific_fileids': specific_fileids,
311
'start_revision': start_revision,
312
'end_revision': end_revision,
315
'generate_tags': generate_tags,
316
'delta_type': delta_type,
317
'diff_type': diff_type,
318
'exclude_common_ancestry': exclude_common_ancestry,
319
'signature': signature,
321
'omit_merges': omit_merges,
322
# Add 'private' attributes for features that may be deprecated
323
'_match_using_deltas': _match_using_deltas,
327
def _apply_log_request_defaults(rqst):
328
"""Apply default values to a request dictionary."""
329
result = _DEFAULT_REQUEST_PARAMS.copy()
335
def format_signature_validity(rev_id, branch):
    """Describe the GPG signature state of a revision in human-readable form.

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log (None for any signature
        status not handled below)
    """
    from breezy import gpg

    # Verification is driven by the branch's configured GPG strategy.
    strategy = gpg.GPGStrategy(branch.get_config_stack())
    verification = branch.repository.verify_revision_signature(rev_id, strategy)
    status = verification[0]
    if status == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(verification[1])
    if status == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(verification[1])
    if status == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if status == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
356
class LogGenerator(object):
    """Abstract producer of log revisions.

    Subclasses must implement iter_log_revisions().
    """

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)
367
class Logger(object):
368
"""An object that generates, formats and displays a log."""
370
def __init__(self, branch, rqst):
373
:param branch: the branch to log
374
:param rqst: A dictionary specifying the query parameters.
375
See make_log_request_dict() for supported values.
378
self.rqst = _apply_log_request_defaults(rqst)
383
:param lf: The LogFormatter object to send the output to.
385
if not isinstance(lf, LogFormatter):
386
warn("not a LogFormatter instance: %r" % lf)
388
with self.branch.lock_read():
389
if getattr(lf, 'begin_log', None):
392
if getattr(lf, 'end_log', None):
395
def _show_body(self, lf):
396
"""Show the main log output.
398
Subclasses may wish to override this.
400
# Tweak the LogRequest based on what the LogFormatter can handle.
401
# (There's no point generating stuff if the formatter can't display it.)
403
if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
404
# user didn't specify levels, use whatever the LF can handle:
405
rqst['levels'] = lf.get_levels()
407
if not getattr(lf, 'supports_tags', False):
408
rqst['generate_tags'] = False
409
if not getattr(lf, 'supports_delta', False):
410
rqst['delta_type'] = None
411
if not getattr(lf, 'supports_diff', False):
412
rqst['diff_type'] = None
413
if not getattr(lf, 'supports_signatures', False):
414
rqst['signature'] = False
416
# Find and print the interesting revisions
417
generator = self._generator_factory(self.branch, rqst)
419
for lr in generator.iter_log_revisions():
421
except errors.GhostRevisionUnusableHere:
422
raise errors.BzrCommandError(
423
gettext('Further revision history missing.'))
426
def _generator_factory(self, branch, rqst):
427
"""Make the LogGenerator object to use.
429
Subclasses may wish to override this.
431
return _DefaultLogGenerator(branch, rqst)
434
class _StartNotLinearAncestor(Exception):
    """Raised when the start revision cannot be found in the left-hand
    history walked from the end revision."""
438
class _DefaultLogGenerator(LogGenerator):
439
"""The default generator of log revisions."""
441
def __init__(self, branch, rqst):
444
if rqst.get('generate_tags') and branch.supports_tags():
445
self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
447
self.rev_tag_dict = {}
449
def iter_log_revisions(self):
450
"""Iterate over LogRevision objects.
452
:return: An iterator yielding LogRevision objects.
455
levels = rqst.get('levels')
456
limit = rqst.get('limit')
457
diff_type = rqst.get('diff_type')
458
show_signature = rqst.get('signature')
459
omit_merges = rqst.get('omit_merges')
461
revision_iterator = self._create_log_revision_iterator()
462
for revs in revision_iterator:
463
for (rev_id, revno, merge_depth), rev, delta in revs:
464
# 0 levels means show everything; merge_depth counts from 0
465
if (levels != 0 and merge_depth is not None and
466
merge_depth >= levels):
468
if omit_merges and len(rev.parent_ids) > 1:
471
raise errors.GhostRevisionUnusableHere(rev_id)
472
if diff_type is None:
475
diff = self._format_diff(rev, rev_id, diff_type)
477
signature = format_signature_validity(rev_id, self.branch)
481
rev, revno, merge_depth, delta,
482
self.rev_tag_dict.get(rev_id), diff, signature)
485
if log_count >= limit:
488
def _format_diff(self, rev, rev_id, diff_type):
489
repo = self.branch.repository
490
if len(rev.parent_ids) == 0:
491
ancestor_id = _mod_revision.NULL_REVISION
493
ancestor_id = rev.parent_ids[0]
494
tree_1 = repo.revision_tree(ancestor_id)
495
tree_2 = repo.revision_tree(rev_id)
496
file_ids = self.rqst.get('specific_fileids')
497
if diff_type == 'partial' and file_ids is not None:
498
specific_files = [tree_2.id2path(id) for id in file_ids]
500
specific_files = None
502
path_encoding = get_diff_header_encoding()
503
diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
504
new_label='', path_encoding=path_encoding)
507
def _create_log_revision_iterator(self):
508
"""Create a revision iterator for log.
510
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
513
self.start_rev_id, self.end_rev_id = _get_revision_limits(
514
self.branch, self.rqst.get('start_revision'),
515
self.rqst.get('end_revision'))
516
if self.rqst.get('_match_using_deltas'):
517
return self._log_revision_iterator_using_delta_matching()
519
# We're using the per-file-graph algorithm. This scales really
520
# well but only makes sense if there is a single file and it's
522
file_count = len(self.rqst.get('specific_fileids'))
524
raise errors.BzrError(
525
"illegal LogRequest: must match-using-deltas "
526
"when logging %d files" % file_count)
527
return self._log_revision_iterator_using_per_file_graph()
529
def _log_revision_iterator_using_delta_matching(self):
530
# Get the base revisions, filtering by the revision range
532
generate_merge_revisions = rqst.get('levels') != 1
533
delayed_graph_generation = not rqst.get('specific_fileids') and (
534
rqst.get('limit') or self.start_rev_id or self.end_rev_id)
535
view_revisions = _calc_view_revisions(
536
self.branch, self.start_rev_id, self.end_rev_id,
537
rqst.get('direction'),
538
generate_merge_revisions=generate_merge_revisions,
539
delayed_graph_generation=delayed_graph_generation,
540
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
542
# Apply the other filters
543
return make_log_rev_iterator(self.branch, view_revisions,
544
rqst.get('delta_type'), rqst.get('match'),
545
file_ids=rqst.get('specific_fileids'),
546
direction=rqst.get('direction'))
548
def _log_revision_iterator_using_per_file_graph(self):
549
# Get the base revisions, filtering by the revision range.
550
# Note that we always generate the merge revisions because
551
# filter_revisions_touching_file_id() requires them ...
553
view_revisions = _calc_view_revisions(
554
self.branch, self.start_rev_id, self.end_rev_id,
555
rqst.get('direction'), generate_merge_revisions=True,
556
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
557
if not isinstance(view_revisions, list):
558
view_revisions = list(view_revisions)
559
view_revisions = _filter_revisions_touching_file_id(self.branch,
560
rqst.get('specific_fileids')[
562
include_merges=rqst.get('levels') != 1)
563
return make_log_rev_iterator(self.branch, view_revisions,
564
rqst.get('delta_type'), rqst.get('match'))
567
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
568
generate_merge_revisions,
569
delayed_graph_generation=False,
570
exclude_common_ancestry=False,
572
"""Calculate the revisions to view.
574
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
575
a list of the same tuples.
577
if (exclude_common_ancestry and start_rev_id == end_rev_id):
578
raise errors.BzrCommandError(gettext(
579
'--exclude-common-ancestry requires two different revisions'))
580
if direction not in ('reverse', 'forward'):
581
raise ValueError(gettext('invalid direction %r') % direction)
582
br_rev_id = branch.last_revision()
583
if br_rev_id == _mod_revision.NULL_REVISION:
586
if (end_rev_id and start_rev_id == end_rev_id
587
and (not generate_merge_revisions
588
or not _has_merges(branch, end_rev_id))):
589
# If a single revision is requested, check we can handle it
590
return _generate_one_revision(branch, end_rev_id, br_rev_id,
592
if not generate_merge_revisions:
594
# If we only want to see linear revisions, we can iterate ...
595
iter_revs = _linear_view_revisions(
596
branch, start_rev_id, end_rev_id,
597
exclude_common_ancestry=exclude_common_ancestry)
598
# If a start limit was given and it's not obviously an
599
# ancestor of the end limit, check it before outputting anything
600
if (direction == 'forward'
601
or (start_rev_id and not _is_obvious_ancestor(
602
branch, start_rev_id, end_rev_id))):
603
iter_revs = list(iter_revs)
604
if direction == 'forward':
605
iter_revs = reversed(iter_revs)
607
except _StartNotLinearAncestor:
608
# Switch to the slower implementation that may be able to find a
609
# non-obvious ancestor out of the left-hand history.
611
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
612
direction, delayed_graph_generation,
613
exclude_common_ancestry)
614
if direction == 'forward':
615
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
619
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    """Build the one-element view list for a single requested revision.

    :return: A list holding one (revision_id, revno_string, merge_depth)
        tuple with merge_depth 0.
    """
    if rev_id == br_rev_id:
        # The branch tip: its revno is already known, no graph work needed.
        return [(br_rev_id, br_revno, 0)]
    # Elsewhere in the graph the (possibly dotted) revno must be computed.
    return [(rev_id, _compute_revno_str(branch, rev_id), 0)]
628
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
629
delayed_graph_generation,
630
exclude_common_ancestry=False):
631
# On large trees, generating the merge graph can take 30-60 seconds
632
# so we delay doing it until a merge is detected, incrementally
633
# returning initial (non-merge) revisions while we can.
635
# The above is only true for old formats (<= 0.92), for newer formats, a
636
# couple of seconds only should be needed to load the whole graph and the
637
# other graph operations needed are even faster than that -- vila 100201
638
initial_revisions = []
639
if delayed_graph_generation:
641
for rev_id, revno, depth in _linear_view_revisions(
642
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
643
if _has_merges(branch, rev_id):
644
# The end_rev_id can be nested down somewhere. We need an
645
# explicit ancestry check. There is an ambiguity here as we
646
# may not raise _StartNotLinearAncestor for a revision that
647
# is an ancestor but not a *linear* one. But since we have
648
# loaded the graph to do the check (or calculate a dotted
649
# revno), we may as well accept to show the log... We need
650
# the check only if start_rev_id is not None as all
651
# revisions have _mod_revision.NULL_REVISION as an ancestor
653
graph = branch.repository.get_graph()
654
if (start_rev_id is not None
655
and not graph.is_ancestor(start_rev_id, end_rev_id)):
656
raise _StartNotLinearAncestor()
657
# Since we collected the revisions so far, we need to
662
initial_revisions.append((rev_id, revno, depth))
664
# No merged revisions found
665
return initial_revisions
666
except _StartNotLinearAncestor:
667
# A merge was never detected so the lower revision limit can't
668
# be nested down somewhere
669
raise errors.BzrCommandError(gettext('Start revision not found in'
670
' history of end revision.'))
672
# We exit the loop above because we encounter a revision with merges, from
673
# this revision, we need to switch to _graph_view_revisions.
675
# A log including nested merges is required. If the direction is reverse,
676
# we rebase the initial merge depths so that the development line is
677
# shown naturally, i.e. just like it is for linear logging. We can easily
678
# make forward the exact opposite display, but showing the merge revisions
679
# indented at the end seems slightly nicer in that case.
680
view_revisions = itertools.chain(iter(initial_revisions),
681
_graph_view_revisions(branch, start_rev_id, end_rev_id,
682
rebase_initial_depths=(
683
direction == 'reverse'),
684
exclude_common_ancestry=exclude_common_ancestry))
685
return view_revisions
688
def _has_merges(branch, rev_id):
    """Return True when rev_id has more than one parent in branch."""
    parent_map = branch.repository.get_parent_map([rev_id])
    return len(parent_map.get(rev_id, [])) > 1
694
def _compute_revno_str(branch, rev_id):
    """Compute the revno string from a rev_id.

    :return: The revno string, or None if the revision is not in the supplied
        branch.
    """
    try:
        dotted = branch.revision_id_to_dotted_revno(rev_id)
    except errors.NoSuchRevision:
        # The revision must be outside of this branch
        return None
    return '.'.join(str(part) for part in dotted)
709
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
710
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
711
if start_rev_id and end_rev_id:
713
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
714
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
715
except errors.NoSuchRevision:
716
# one or both is not in the branch; not obvious
718
if len(start_dotted) == 1 and len(end_dotted) == 1:
720
return start_dotted[0] <= end_dotted[0]
721
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
722
start_dotted[0:1] == end_dotted[0:1]):
723
# both on same development line
724
return start_dotted[2] <= end_dotted[2]
728
# if either start or end is not specified then we use either the first or
729
# the last revision and *they* are obvious ancestors.
733
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
734
exclude_common_ancestry=False):
735
"""Calculate a sequence of revisions to view, newest to oldest.
737
:param start_rev_id: the lower revision-id
738
:param end_rev_id: the upper revision-id
739
:param exclude_common_ancestry: Whether the start_rev_id should be part of
740
the iterated revisions.
741
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
742
dotted_revno will be None for ghosts
743
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
744
is not found walking the left-hand history
746
repo = branch.repository
747
graph = repo.get_graph()
748
if start_rev_id is None and end_rev_id is None:
750
br_revno, br_rev_id = branch.last_revision_info()
751
except errors.GhostRevisionsHaveNoRevno:
752
br_rev_id = branch.last_revision()
756
graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
757
(_mod_revision.NULL_REVISION,))
760
revision_id = next(graph_iter)
761
except errors.RevisionNotPresent as e:
763
yield e.revision_id, None, None
765
except StopIteration:
768
yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
769
if cur_revno is not None:
772
br_rev_id = branch.last_revision()
773
if end_rev_id is None:
774
end_rev_id = br_rev_id
775
found_start = start_rev_id is None
776
graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
777
(_mod_revision.NULL_REVISION,))
780
revision_id = next(graph_iter)
781
except StopIteration:
783
except errors.RevisionNotPresent as e:
785
yield e.revision_id, None, None
788
revno_str = _compute_revno_str(branch, revision_id)
789
if not found_start and revision_id == start_rev_id:
790
if not exclude_common_ancestry:
791
yield revision_id, revno_str, 0
795
yield revision_id, revno_str, 0
797
raise _StartNotLinearAncestor()
800
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
801
rebase_initial_depths=True,
802
exclude_common_ancestry=False):
803
"""Calculate revisions to view including merges, newest to oldest.
805
:param branch: the branch
806
:param start_rev_id: the lower revision-id
807
:param end_rev_id: the upper revision-id
808
:param rebase_initial_depth: should depths be rebased until a mainline
810
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
812
if exclude_common_ancestry:
813
stop_rule = 'with-merges-without-common-ancestry'
815
stop_rule = 'with-merges'
816
view_revisions = branch.iter_merge_sorted_revisions(
817
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
819
if not rebase_initial_depths:
820
for (rev_id, merge_depth, revno, end_of_merge
822
yield rev_id, '.'.join(map(str, revno)), merge_depth
824
# We're following a development line starting at a merged revision.
825
# We need to adjust depths down by the initial depth until we find
826
# a depth less than it. Then we use that depth as the adjustment.
827
# If and when we reach the mainline, depth adjustment ends.
828
depth_adjustment = None
829
for (rev_id, merge_depth, revno, end_of_merge
831
if depth_adjustment is None:
832
depth_adjustment = merge_depth
834
if merge_depth < depth_adjustment:
835
# From now on we reduce the depth adjustement, this can be
836
# surprising for users. The alternative requires two passes
837
# which breaks the fast display of the first revision
839
depth_adjustment = merge_depth
840
merge_depth -= depth_adjustment
841
yield rev_id, '.'.join(map(str, revno)), merge_depth
844
def _rebase_merge_depth(view_revisions):
845
"""Adjust depths upwards so the top level is 0."""
846
# If either the first or last revision have a merge_depth of 0, we're done
847
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
848
min_depth = min([d for r, n, d in view_revisions])
850
view_revisions = [(r, n, d - min_depth)
851
for r, n, d in view_revisions]
852
return view_revisions
855
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
856
file_ids=None, direction='reverse'):
857
"""Create a revision iterator for log.
859
:param branch: The branch being logged.
860
:param view_revisions: The revisions being viewed.
861
:param generate_delta: Whether to generate a delta for each revision.
862
Permitted values are None, 'full' and 'partial'.
863
:param search: A user text search string.
864
:param file_ids: If non empty, only revisions matching one or more of
865
the file-ids are to be kept.
866
:param direction: the direction in which view_revisions is sorted
867
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
870
# Convert view_revisions into (view, None, None) groups to fit with
871
# the standard interface here.
872
if isinstance(view_revisions, list):
873
# A single batch conversion is faster than many incremental ones.
874
# As we have all the data, do a batch conversion.
875
nones = [None] * len(view_revisions)
876
log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
879
for view in view_revisions:
880
yield (view, None, None)
881
log_rev_iterator = iter([_convert()])
882
for adapter in log_adapters:
883
# It would be nicer if log adapters were first class objects
884
# with custom parameters. This will do for now. IGC 20090127
885
if adapter == _make_delta_filter:
886
log_rev_iterator = adapter(
887
branch, generate_delta, search, log_rev_iterator, file_ids,
890
log_rev_iterator = adapter(
891
branch, generate_delta, search, log_rev_iterator)
892
return log_rev_iterator
895
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
896
"""Create a filtered iterator of log_rev_iterator matching on a regex.
898
:param branch: The branch being logged.
899
:param generate_delta: Whether to generate a delta for each revision.
900
:param match: A dictionary with properties as keys and lists of strings
901
as values. To match, a revision may match any of the supplied strings
902
within a single property but must match at least one string for each
904
:param log_rev_iterator: An input iterator containing all revisions that
905
could be displayed, in lists.
906
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
910
return log_rev_iterator
911
# Use lazy_compile so mapping to InvalidPattern error occurs.
912
searchRE = [(k, [lazy_regex.lazy_compile(x, re.IGNORECASE) for x in v])
913
for k, v in match.items()]
914
return _filter_re(searchRE, log_rev_iterator)
917
def _filter_re(searchRE, log_rev_iterator):
918
for revs in log_rev_iterator:
919
new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
924
def _match_filter(searchRE, rev):
926
'message': (rev.message,),
927
'committer': (rev.committer,),
928
'author': (rev.get_apparent_authors()),
929
'bugs': list(rev.iter_bugs())
931
strings[''] = [item for inner_list in strings.values()
932
for item in inner_list]
933
for k, v in searchRE:
934
if k in strings and not _match_any_filter(strings[k], v):
939
def _match_any_filter(strings, res):
    """Return True if any compiled regex in res matches any of strings."""
    for pattern in res:
        for text in strings:
            if pattern.search(text):
                return True
    return False
943
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    """Add revision deltas to a log iterator if needed.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
      Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
      could be displayed, in lists.
    :param fileids: If non empty, only revisions matching one or more of
      the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
      delta) tuples; the input iterator is returned unchanged when no delta
      generation or file-id filtering is requested.
    """
    if generate_delta or fileids:
        return _generate_deltas(branch.repository, log_rev_iterator,
                                generate_delta, fileids, direction)
    # Nothing to add or filter: pass the stream straight through.
    return log_rev_iterator
965
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
967
"""Create deltas for each batch of revisions in log_rev_iterator.
969
If we're only generating deltas for the sake of filtering against
970
file-ids, we stop generating deltas once all file-ids reach the
971
appropriate life-cycle point. If we're receiving data newest to
972
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
974
check_fileids = fileids is not None and len(fileids) > 0
976
fileid_set = set(fileids)
977
if direction == 'reverse':
983
for revs in log_rev_iterator:
984
# If we were matching against fileids and we've run out,
985
# there's nothing left to do
986
if check_fileids and not fileid_set:
988
revisions = [rev[1] for rev in revs]
990
if delta_type == 'full' and not check_fileids:
991
deltas = repository.get_deltas_for_revisions(revisions)
992
for rev, delta in zip(revs, deltas):
993
new_revs.append((rev[0], rev[1], delta))
995
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
996
for rev, delta in zip(revs, deltas):
998
if delta is None or not delta.has_changed():
1001
_update_fileids(delta, fileid_set, stop_on)
1002
if delta_type is None:
1004
elif delta_type == 'full':
1005
# If the file matches all the time, rebuilding
1006
# a full delta like this in addition to a partial
1007
# one could be slow. However, it's likely that
1008
# most revisions won't get this far, making it
1009
# faster to filter on the partial deltas and
1010
# build the occasional full delta than always
1011
# building full deltas and filtering those.
1013
delta = repository.get_revision_delta(rev_id)
1014
new_revs.append((rev[0], rev[1], delta))
1018
def _update_fileids(delta, fileids, stop_on):
1019
"""Update the set of file-ids to search based on file lifecycle events.
1021
:param fileids: a set of fileids to update
1022
:param stop_on: either 'add' or 'remove' - take file-ids out of the
1023
fileids set once their add or remove entry is detected respectively
1025
if stop_on == 'add':
1026
for item in delta.added:
1027
if item[1] in fileids:
1028
fileids.remove(item[1])
1029
elif stop_on == 'delete':
1030
for item in delta.removed:
1031
if item[1] in fileids:
1032
fileids.remove(item[1])
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
1036
"""Extract revision objects from the repository
1038
:param branch: The branch being logged.
1039
:param generate_delta: Whether to generate a delta for each revision.
1040
:param search: A user text search string.
1041
:param log_rev_iterator: An input iterator containing all revisions that
1042
could be displayed, in lists.
1043
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1046
repository = branch.repository
1047
for revs in log_rev_iterator:
1048
# r = revision_id, n = revno, d = merge depth
1049
revision_ids = [view[0] for view, _, _ in revs]
1050
revisions = dict(repository.iter_revisions(revision_ids))
1051
yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
1055
"""Group up a single large batch into smaller ones.
1057
:param branch: The branch being logged.
1058
:param generate_delta: Whether to generate a delta for each revision.
1059
:param search: A user text search string.
1060
:param log_rev_iterator: An input iterator containing all revisions that
1061
could be displayed, in lists.
1062
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1066
for batch in log_rev_iterator:
1069
step = [detail for _, detail in zip(range(num), batch)]
1073
num = min(int(num * 1.5), 200)
def _get_revision_limits(branch, start_revision, end_revision):
1077
"""Get and check revision limits.
1079
:param branch: The branch containing the revisions.
1081
:param start_revision: The first revision to be logged.
1082
but for merge revision support a RevisionInfo is expected.
1084
:param end_revision: The last revision to be logged.
1085
For backwards compatibility this may be a mainline integer revno,
1086
but for merge revision support a RevisionInfo is expected.
1088
:return: (start_rev_id, end_rev_id) tuple.
1092
if start_revision is not None:
1093
if not isinstance(start_revision, revisionspec.RevisionInfo):
1094
raise TypeError(start_revision)
1095
start_rev_id = start_revision.rev_id
1096
start_revno = start_revision.revno
1097
if start_revno is None:
1102
if end_revision is not None:
1103
if not isinstance(end_revision, revisionspec.RevisionInfo):
1104
raise TypeError(start_revision)
1105
end_rev_id = end_revision.rev_id
1106
end_revno = end_revision.revno
1107
if end_revno is None:
1109
end_revno = branch.revno()
1110
except errors.GhostRevisionsHaveNoRevno:
1113
if branch.last_revision() != _mod_revision.NULL_REVISION:
1114
if (start_rev_id == _mod_revision.NULL_REVISION
1115
or end_rev_id == _mod_revision.NULL_REVISION):
1116
raise errors.BzrCommandError(
1117
gettext('Logging revision 0 is invalid.'))
1118
if end_revno is not None and start_revno > end_revno:
1119
raise errors.BzrCommandError(
1120
gettext("Start revision must be older than the end revision."))
1121
return (start_rev_id, end_rev_id)
1124
def _get_mainline_revs(branch, start_revision, end_revision):
1125
"""Get the mainline revisions from the branch.
1127
Generates the list of mainline revisions for the branch.
1129
:param branch: The branch containing the revisions.
1131
:param start_revision: The first revision to be logged.
1132
For backwards compatibility this may be a mainline integer revno,
1133
but for merge revision support a RevisionInfo is expected.
1135
:param end_revision: The last revision to be logged.
1136
For backwards compatibility this may be a mainline integer revno,
1137
but for merge revision support a RevisionInfo is expected.
1139
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
1141
branch_revno, branch_last_revision = branch.last_revision_info()
1142
if branch_revno == 0:
1143
return None, None, None, None
1145
# For mainline generation, map start_revision and end_revision to
1146
# mainline revnos. If the revision is not on the mainline choose the
1147
# appropriate extreme of the mainline instead - the extra will be
1149
# Also map the revisions to rev_ids, to be used in the later filtering
1152
if start_revision is None:
1155
if isinstance(start_revision, revisionspec.RevisionInfo):
1156
start_rev_id = start_revision.rev_id
1157
start_revno = start_revision.revno or 1
1159
branch.check_real_revno(start_revision)
1160
start_revno = start_revision
1163
if end_revision is None:
1164
end_revno = branch_revno
1166
if isinstance(end_revision, revisionspec.RevisionInfo):
1167
end_rev_id = end_revision.rev_id
1168
end_revno = end_revision.revno or branch_revno
1170
branch.check_real_revno(end_revision)
1171
end_revno = end_revision
1173
if ((start_rev_id == _mod_revision.NULL_REVISION)
1174
or (end_rev_id == _mod_revision.NULL_REVISION)):
1175
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1176
if start_revno > end_revno:
1177
raise errors.BzrCommandError(gettext("Start revision must be older "
1178
"than the end revision."))
1180
if end_revno < start_revno:
1181
return None, None, None, None
1182
cur_revno = branch_revno
1185
graph = branch.repository.get_graph()
1186
for revision_id in graph.iter_lefthand_ancestry(
1187
branch_last_revision, (_mod_revision.NULL_REVISION,)):
1188
if cur_revno < start_revno:
1189
# We have gone far enough, but we always add 1 more revision
1190
rev_nos[revision_id] = cur_revno
1191
mainline_revs.append(revision_id)
1193
if cur_revno <= end_revno:
1194
rev_nos[revision_id] = cur_revno
1195
mainline_revs.append(revision_id)
1198
# We walked off the edge of all revisions, so we add a 'None' marker
1199
mainline_revs.append(None)
1201
mainline_revs.reverse()
1203
# override the mainline to look like the revision history.
1204
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1207
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1208
include_merges=True):
1209
r"""Return the list of revision ids which touch a given file id.
1211
The function filters view_revisions and returns a subset.
1212
This includes the revisions which directly change the file id,
1213
and the revisions which merge these changes. So if the
1226
And 'C' changes a file, then both C and D will be returned. F will not be
1227
returned even though it brings the changes to C into the branch starting
1228
with E. (Note that if we were using F as the tip instead of G, then we
1231
This will also be restricted based on a subset of the mainline.
1233
:param branch: The branch where we can get text revision information.
1235
:param file_id: Filter out revisions that do not touch file_id.
1237
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1238
tuples. This is the list of revisions which will be filtered. It is
1239
assumed that view_revisions is in merge_sort order (i.e. newest
1242
:param include_merges: include merge revisions in the result or not
1244
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1246
# Lookup all possible text keys to determine which ones actually modified
1248
graph = branch.repository.get_file_graph()
1249
get_parent_map = graph.get_parent_map
1250
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1252
# Looking up keys in batches of 1000 can cut the time in half, as well as
1253
# memory consumption. GraphIndex *does* like to look for a few keys in
1254
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1255
# TODO: This code needs to be re-evaluated periodically as we tune the
1256
# indexing layer. We might consider passing in hints as to the known
1257
# access pattern (sparse/clustered, high success rate/low success
1258
# rate). This particular access is clustered with a low success rate.
1259
modified_text_revisions = set()
1261
for start in range(0, len(text_keys), chunk_size):
1262
next_keys = text_keys[start:start + chunk_size]
1263
# Only keep the revision_id portion of the key
1264
modified_text_revisions.update(
1265
[k[1] for k in get_parent_map(next_keys)])
1266
del text_keys, next_keys
1269
# Track what revisions will merge the current revision, replace entries
1270
# with 'None' when they have been added to result
1271
current_merge_stack = [None]
1272
for info in view_revisions:
1273
rev_id, revno, depth = info
1274
if depth == len(current_merge_stack):
1275
current_merge_stack.append(info)
1277
del current_merge_stack[depth + 1:]
1278
current_merge_stack[-1] = info
1280
if rev_id in modified_text_revisions:
1281
# This needs to be logged, along with the extra revisions
1282
for idx in range(len(current_merge_stack)):
1283
node = current_merge_stack[idx]
1284
if node is not None:
1285
if include_merges or node[2] == 0:
1287
current_merge_stack[idx] = None
def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth.  There may be no topological justification for
    this, but it looks much nicer.

    :param merge_sorted_revisions: a list of (rev_id, revno, depth) tuples
        in merge-sort order
    :param _depth: internal recursion parameter; callers use the default
    :return: the reordered list of tuples
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None, signature=None):
        # The Revision object itself (may be None)
        self.rev = rev
        # Revnos are displayed as strings since they may be dotted
        if revno is None:
            self.revno = None
        else:
            self.revno = str(revno)
        # Depth of this revision in the merge graph (0 = mainline)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff
        self.signature = signature
class LogFormatter(object):
1345
"""Abstract class to display log messages.
1347
At a minimum, a derived class must implement the log_revision method.
1349
If the LogFormatter needs to be informed of the beginning or end of
1350
a log it should implement the begin_log and/or end_log hook methods.
1352
A LogFormatter should define the following supports_XXX flags
1353
to indicate which LogRevision attributes it supports:
1355
- supports_delta must be True if this log formatter supports delta.
1356
Otherwise the delta attribute may not be populated. The 'delta_format'
1357
attribute describes whether the 'short_status' format (1) or the long
1358
one (2) should be used.
1360
- supports_merge_revisions must be True if this log formatter supports
1361
merge revisions. If not, then only mainline revisions will be passed
1364
- preferred_levels is the number of levels this formatter defaults to.
1365
The default value is zero meaning display all levels.
1366
This value is only relevant if supports_merge_revisions is True.
1368
- supports_tags must be True if this log formatter supports tags.
1369
Otherwise the tags attribute may not be populated.
1371
- supports_diff must be True if this log formatter supports diffs.
1372
Otherwise the diff attribute may not be populated.
1374
- supports_signatures must be True if this log formatter supports GPG
1377
Plugins can register functions to show custom revision properties using
1378
the properties_handler_registry. The registered function
1379
must respect the following interface description::
1381
def my_show_properties(properties_dict):
1382
# code that returns a dict {'name':'value'} of the properties
1385
preferred_levels = 0
1387
def __init__(self, to_file, show_ids=False, show_timezone='original',
1388
delta_format=None, levels=None, show_advice=False,
1389
to_exact_file=None, author_list_handler=None):
1390
"""Create a LogFormatter.
1392
:param to_file: the file to output to
1393
:param to_exact_file: if set, gives an output stream to which
1394
non-Unicode diffs are written.
1395
:param show_ids: if True, revision-ids are to be displayed
1396
:param show_timezone: the timezone to use
1397
:param delta_format: the level of delta information to display
1398
or None to leave it to the formatter to decide
1399
:param levels: the number of levels to display; None or -1 to
1400
let the log formatter decide.
1401
:param show_advice: whether to show advice at the end of the
1403
:param author_list_handler: callable generating a list of
1404
authors to display for a given revision
1406
self.to_file = to_file
1407
# 'exact' stream used to show diff, it should print content 'as is'
1408
# and should not try to decode/encode it to unicode to avoid bug
1410
if to_exact_file is not None:
1411
self.to_exact_file = to_exact_file
1413
# XXX: somewhat hacky; this assumes it's a codec writer; it's
1414
# better for code that expects to get diffs to pass in the exact
1416
self.to_exact_file = getattr(to_file, 'stream', to_file)
1417
self.show_ids = show_ids
1418
self.show_timezone = show_timezone
1419
if delta_format is None:
1420
# Ensures backward compatibility
1421
delta_format = 2 # long format
1422
self.delta_format = delta_format
1423
self.levels = levels
1424
self._show_advice = show_advice
1425
self._merge_count = 0
1426
self._author_list_handler = author_list_handler
1428
def get_levels(self):
1429
"""Get the number of levels to display or 0 for all."""
1430
if getattr(self, 'supports_merge_revisions', False):
1431
if self.levels is None or self.levels == -1:
1432
self.levels = self.preferred_levels
1437
def log_revision(self, revision):
1440
:param revision: The LogRevision to be logged.
1442
raise NotImplementedError('not implemented in abstract base')
1444
def show_advice(self):
1445
"""Output user advice, if any, when the log is completed."""
1446
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1447
advice_sep = self.get_advice_separator()
1449
self.to_file.write(advice_sep)
1451
"Use --include-merged or -n0 to see merged revisions.\n")
1453
def get_advice_separator(self):
1454
"""Get the text separating the log from the closing advice."""
1457
def short_committer(self, rev):
1458
name, address = config.parse_username(rev.committer)
1463
def short_author(self, rev):
1464
return self.authors(rev, 'first', short=True, sep=', ')
1466
def authors(self, rev, who, short=False, sep=None):
1467
"""Generate list of authors, taking --authors option into account.
1469
The caller has to specify the name of a author list handler,
1470
as provided by the author list registry, using the ``who``
1471
argument. That name only sets a default, though: when the
1472
user selected a different author list generation using the
1473
``--authors`` command line switch, as represented by the
1474
``author_list_handler`` constructor argument, that value takes
1477
:param rev: The revision for which to generate the list of authors.
1478
:param who: Name of the default handler.
1479
:param short: Whether to shorten names to either name or address.
1480
:param sep: What separator to use for automatic concatenation.
1482
if self._author_list_handler is not None:
1483
# The user did specify --authors, which overrides the default
1484
author_list_handler = self._author_list_handler
1486
# The user didn't specify --authors, so we use the caller's default
1487
author_list_handler = author_list_registry.get(who)
1488
names = author_list_handler(rev)
1490
for i in range(len(names)):
1491
name, address = config.parse_username(names[i])
1497
names = sep.join(names)
1500
def merge_marker(self, revision):
1501
"""Get the merge marker to include in the output or '' if none."""
1502
if len(revision.rev.parent_ids) > 1:
1503
self._merge_count += 1
1508
def show_properties(self, revision, indent):
1509
"""Displays the custom properties returned by each registered handler.
1511
If a registered handler raises an error it is propagated.
1513
for line in self.custom_properties(revision):
1514
self.to_file.write("%s%s\n" % (indent, line))
1516
def custom_properties(self, revision):
1517
"""Format the custom properties returned by each registered handler.
1519
If a registered handler raises an error it is propagated.
1521
:return: a list of formatted lines (excluding trailing newlines)
1523
lines = self._foreign_info_properties(revision)
1524
for key, handler in properties_handler_registry.iteritems():
1525
lines.extend(self._format_properties(handler(revision)))
1528
def _foreign_info_properties(self, rev):
1529
"""Custom log displayer for foreign revision identifiers.
1531
:param rev: Revision object.
1533
# Revision comes directly from a foreign repository
1534
if isinstance(rev, foreign.ForeignRevision):
1535
return self._format_properties(
1536
rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))
1538
# Imported foreign revision revision ids always contain :
1539
if b":" not in rev.revision_id:
1542
# Revision was once imported from a foreign repository
1544
foreign_revid, mapping = \
1545
foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
1546
except errors.InvalidRevisionId:
1549
return self._format_properties(
1550
mapping.vcs.show_foreign_revid(foreign_revid))
1552
def _format_properties(self, properties):
1554
for key, value in properties.items():
1555
lines.append(key + ': ' + value)
1558
def show_diff(self, to_file, diff, indent):
1559
encoding = get_terminal_encoding()
1560
for l in diff.rstrip().split(b'\n'):
1561
to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
1564
# Separator between revisions in long format
1565
_LONG_SEP = '-' * 60
1568
class LongLogFormatter(LogFormatter):
1570
supports_merge_revisions = True
1571
preferred_levels = 1
1572
supports_delta = True
1573
supports_tags = True
1574
supports_diff = True
1575
supports_signatures = True
1577
def __init__(self, *args, **kwargs):
1578
super(LongLogFormatter, self).__init__(*args, **kwargs)
1579
if self.show_timezone == 'original':
1580
self.date_string = self._date_string_original_timezone
1582
self.date_string = self._date_string_with_timezone
1584
def _date_string_with_timezone(self, rev):
1585
return format_date(rev.timestamp, rev.timezone or 0,
1588
def _date_string_original_timezone(self, rev):
1589
return format_date_with_offset_in_original_timezone(rev.timestamp,
1592
def log_revision(self, revision):
1593
"""Log a revision, either merged or not."""
1594
indent = ' ' * revision.merge_depth
1596
if revision.revno is not None:
1597
lines.append('revno: %s%s' % (revision.revno,
1598
self.merge_marker(revision)))
1600
lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
1601
if self.show_ids or revision.revno is None:
1602
lines.append('revision-id: %s' %
1603
(revision.rev.revision_id.decode('utf-8'),))
1605
for parent_id in revision.rev.parent_ids:
1606
lines.append('parent: %s' % (parent_id.decode('utf-8'),))
1607
lines.extend(self.custom_properties(revision.rev))
1609
committer = revision.rev.committer
1610
authors = self.authors(revision.rev, 'all')
1611
if authors != [committer]:
1612
lines.append('author: %s' % (", ".join(authors),))
1613
lines.append('committer: %s' % (committer,))
1615
branch_nick = revision.rev.properties.get('branch-nick', None)
1616
if branch_nick is not None:
1617
lines.append('branch nick: %s' % (branch_nick,))
1619
lines.append('timestamp: %s' % (self.date_string(revision.rev),))
1621
if revision.signature is not None:
1622
lines.append('signature: ' + revision.signature)
1624
lines.append('message:')
1625
if not revision.rev.message:
1626
lines.append(' (no message)')
1628
message = revision.rev.message.rstrip('\r\n')
1629
for l in message.split('\n'):
1630
lines.append(' %s' % (l,))
1632
# Dump the output, appending the delta and diff if requested
1633
to_file = self.to_file
1634
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1635
if revision.delta is not None:
1636
# Use the standard status output to display changes
1637
from breezy.delta import report_delta
1638
report_delta(to_file, revision.delta, short_status=False,
1639
show_ids=self.show_ids, indent=indent)
1640
if revision.diff is not None:
1641
to_file.write(indent + 'diff:\n')
1643
# Note: we explicitly don't indent the diff (relative to the
1644
# revision information) so that the output can be fed to patch -p0
1645
self.show_diff(self.to_exact_file, revision.diff, indent)
1646
self.to_exact_file.flush()
1648
def get_advice_separator(self):
1649
"""Get the text separating the log from the closing advice."""
1650
return '-' * 60 + '\n'
1653
class ShortLogFormatter(LogFormatter):
1655
supports_merge_revisions = True
1656
preferred_levels = 1
1657
supports_delta = True
1658
supports_tags = True
1659
supports_diff = True
1661
def __init__(self, *args, **kwargs):
1662
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1663
self.revno_width_by_depth = {}
1665
def log_revision(self, revision):
1666
# We need two indents: one per depth and one for the information
1667
# relative to that indent. Most mainline revnos are 5 chars or
1668
# less while dotted revnos are typically 11 chars or less. Once
1669
# calculated, we need to remember the offset for a given depth
1670
# as we might be starting from a dotted revno in the first column
1671
# and we want subsequent mainline revisions to line up.
1672
depth = revision.merge_depth
1673
indent = ' ' * depth
1674
revno_width = self.revno_width_by_depth.get(depth)
1675
if revno_width is None:
1676
if revision.revno is None or revision.revno.find('.') == -1:
1677
# mainline revno, e.g. 12345
1680
# dotted revno, e.g. 12345.10.55
1682
self.revno_width_by_depth[depth] = revno_width
1683
offset = ' ' * (revno_width + 1)
1685
to_file = self.to_file
1688
tags = ' {%s}' % (', '.join(sorted(revision.tags)))
1689
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1690
revision.revno or "", self.short_author(
1692
format_date(revision.rev.timestamp,
1693
revision.rev.timezone or 0,
1694
self.show_timezone, date_fmt="%Y-%m-%d",
1696
tags, self.merge_marker(revision)))
1697
self.show_properties(revision.rev, indent + offset)
1698
if self.show_ids or revision.revno is None:
1699
to_file.write(indent + offset + 'revision-id:%s\n'
1700
% (revision.rev.revision_id.decode('utf-8'),))
1701
if not revision.rev.message:
1702
to_file.write(indent + offset + '(no message)\n')
1704
message = revision.rev.message.rstrip('\r\n')
1705
for l in message.split('\n'):
1706
to_file.write(indent + offset + '%s\n' % (l,))
1708
if revision.delta is not None:
1709
# Use the standard status output to display changes
1710
from breezy.delta import report_delta
1711
report_delta(to_file, revision.delta,
1712
short_status=self.delta_format == 1,
1713
show_ids=self.show_ids, indent=indent + offset)
1714
if revision.diff is not None:
1715
self.show_diff(self.to_exact_file, revision.diff, ' ')
1719
class LineLogFormatter(LogFormatter):
1721
supports_merge_revisions = True
1722
preferred_levels = 1
1723
supports_tags = True
1725
def __init__(self, *args, **kwargs):
1726
super(LineLogFormatter, self).__init__(*args, **kwargs)
1727
width = terminal_width()
1728
if width is not None:
1729
# we need one extra space for terminals that wrap on last char
1731
self._max_chars = width
1733
def truncate(self, str, max_len):
1734
if max_len is None or len(str) <= max_len:
1736
return str[:max_len - 3] + '...'
1738
def date_string(self, rev):
1739
return format_date(rev.timestamp, rev.timezone or 0,
1740
self.show_timezone, date_fmt="%Y-%m-%d",
1743
def message(self, rev):
1745
return '(no message)'
1749
def log_revision(self, revision):
1750
indent = ' ' * revision.merge_depth
1751
self.to_file.write(self.log_string(revision.revno, revision.rev,
1752
self._max_chars, revision.tags, indent))
1753
self.to_file.write('\n')
1755
def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
1756
"""Format log info into one string. Truncate tail of string
1758
:param revno: revision number or None.
1759
Revision numbers counts from 1.
1760
:param rev: revision object
1761
:param max_chars: maximum length of resulting string
1762
:param tags: list of tags or None
1763
:param prefix: string to prefix each line
1764
:return: formatted truncated string
1768
# show revno only when is not None
1769
out.append("%s:" % revno)
1770
if max_chars is not None:
1771
out.append(self.truncate(
1772
self.short_author(rev), (max_chars + 3) // 4))
1774
out.append(self.short_author(rev))
1775
out.append(self.date_string(rev))
1776
if len(rev.parent_ids) > 1:
1777
out.append('[merge]')
1779
tag_str = '{%s}' % (', '.join(sorted(tags)))
1781
out.append(rev.get_summary())
1782
return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
class GnuChangelogLogFormatter(LogFormatter):
    """Log formatter producing GNU ChangeLog style output."""

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        # GNU ChangeLog convention: two spaces before the email address
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path, = c[:1]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
        to_file.write('\n')
def line_log(rev, max_chars):
    """Format a single revision as a one-line string.

    :param rev: the Revision object to format
    :param max_chars: maximum length of the result, or None for no limit
    :return: the formatted string (see LineLogFormatter.log_string)
    """
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)
class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters."""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct.  'short', 'long'
            and 'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        """Return the formatter class chosen by the branch's configuration."""
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))
log_formatter_registry = LogFormatterRegistry()


log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')
def register_formatter(name, formatter):
    """Register a log formatter class under ``name``."""
    log_formatter_registry.register(name, formatter)
def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    :param name: Name of the formatter to construct; currently 'long',
        'short' and 'line' are supported.
    :raises BzrCommandError: if no formatter is registered under ``name``.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(
            gettext("unknown log formatter: %r") % name)
def author_list_all(rev):
    """Return a copy of the list of all apparent authors of ``rev``."""
    return rev.get_apparent_authors()[:]
def author_list_first(rev):
    """Return a list with only the first apparent author (may be empty)."""
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []
def author_list_committer(rev):
    """Return the committer of ``rev`` as a single-element list."""
    return [rev.committer]
author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')

author_list_registry.register('first', author_list_first,
                              'The first author')

author_list_registry.register('committer', author_list_committer,
                              'The committer')
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history comparing the old revision history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    :param log_format: registered name of the formatter to use
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
                                                            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in range(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
                or len(old_rh) <= i
                or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    # TODO: It might be nice to do something like show_log
    # and show the merged entries. But since this is the
    # removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*' * 60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i + 1, 0, None)
            lf.log_revision(lr)
        to_file.write('*' * 60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 None,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx + 1,
                 end_revision=len(new_rh))
def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.
    :return: (old_history, new_history) - the lefthand revisions unique to
        each side, each oldest-first.
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    # Walk both ancestries in lockstep until one side reaches a revision
    # already seen on the other side (the common ancestor) or runs out.
    while do_new or do_old:
        if do_new:
            try:
                new_revision = next(new_iter)
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = next(old_iter)
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    # Drop the common ancestor and everything older from both histories.
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        # Revisions that were on the old tip's lefthand history but are no
        # longer reachable from the new tip.
        output.write('*' * 60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*' * 60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno)
def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        # NOTE(review): revnos here count upward from last_revno rather than
        # ending at it — confirm this matches the intended numbering.
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)
def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        # No revision range given: look the paths up in the working/basis
        # tree, falling back to the first revision for deleted files.
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info
def _get_kind_for_file_id(tree, path, file_id):
2146
"""Return the kind of a file-id or None if it doesn't exist."""
2147
if file_id is not None:
2148
return tree.kind(path)
2153
properties_handler_registry = registry.Registry()


# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
    """Extract fixed/related bug URLs from a revision's 'bugs' property.

    :param revision: a revision object whose ``properties`` dict may
        contain a 'bugs' entry of newline-separated "<url> <status>" pairs.
    :return: a dict mapping a human-readable label (e.g. 'fixes bug(s)')
        to a space-joined string of bug URLs; empty when no bugs recorded.
    """
    ret = {}
    if 'bugs' in revision.properties:
        bug_lines = revision.properties['bugs'].split('\n')
        # Each line is "<url> <status>"; rows without a status are ignored.
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']
        related_bug_urls = [row[0] for row in bug_rows if
                            len(row) > 1 and row[1] == 'related']
        if fixed_bug_urls:
            text = ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))
            ret[text] = ' '.join(fixed_bug_urls)
        if related_bug_urls:
            text = ngettext('related bug', 'related bugs',
                            len(related_bug_urls))
            ret[text] = ' '.join(related_bug_urls)
    return ret


properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
# adapters which revision ids to log are filtered. When log is called, the
2182
# log_rev_iterator is adapted through each of these factory methods.
2183
# Plugins are welcome to mutate this list in any way they like - as long
2184
# as the overall behaviour is preserved. At this point there is no extensible
2185
# mechanism for getting parameters to each factory method, and until there is
2186
# this won't be considered a stable api.
2190
# read revision objects
2191
_make_revision_objects,
2192
# filter on log messages
2193
_make_search_filter,
2194
# generate deltas for things we will show